Posted to commits@directory.apache.org by pl...@apache.org on 2017/11/28 03:04:02 UTC

[01/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Repository: directory-kerby
Updated Branches:
  refs/heads/has-project 438904f7e -> a8b1c28fa


http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
deleted file mode 100644
index 322eafd..0000000
--- a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-
-public class AddPrincipalCmd extends HadminCmd {
-
-    public static final String USAGE = "Usage: add_principal [options] <principal-name>\n"
-            + "\toptions are:\n"
-            + "\t\t[-randkey]\n"
-            + "\t\t[-pw password]"
-            + "\tExample:\n"
-            + "\t\tadd_principal -pw mypassword alice\n";
-
-    public AddPrincipalCmd(LocalHasAdmin hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-
-        if (items.length < 2) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        String clientPrincipal = items[items.length - 1];
-        if (!items[1].startsWith("-")) {
-            getHadmin().addPrincipal(clientPrincipal);
-        } else if (items[1].startsWith("-randkey")) {
-            getHadmin().addPrincipal(clientPrincipal);
-        } else if (items[1].startsWith("-pw")) {
-            String password = items[2];
-            getHadmin().addPrincipal(clientPrincipal, password);
-        } else {
-            System.err.println("add_principal cmd format error.");
-            System.err.println(USAGE);
-            return;
-        }
-        System.out.println("Success to add principal :" + clientPrincipal);
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
deleted file mode 100644
index b38f2c7..0000000
--- a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-
-public class AddPrincipalsCmd extends HadminCmd {
-    private static final Logger LOG = LoggerFactory.getLogger(AddPrincipalsCmd.class);
-
-    private static final String USAGE = "\nUsage: create_principals [hostRoles-file]\n"
-            + "\t'hostRoles-file' is a file with a hostRoles json string like:\n"
-            + "\t\t{HOSTS: [ {\"name\":\"host1\",\"hostRoles\":\"HDFS\"}, "
-            + "{\"name\":\"host2\",\"hostRoles\":\"HDFS,HBASE\"} ] }\n"
-            + "\tExample:\n"
-            + "\t\tcreate_principals hostroles.txt\n";
-
-    public AddPrincipalsCmd(LocalHasAdmin hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        if (items.length != 2) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        File hostRoles = new File(items[1]);
-        if (!hostRoles.exists()) {
-            throw new HasException("HostRoles file is not exists.");
-        }
-        try {
-            BufferedReader reader = new BufferedReader(new FileReader(hostRoles));
-            StringBuilder sb = new StringBuilder();
-            String tempString;
-            while ((tempString = reader.readLine()) != null) {
-                sb.append(tempString);
-            }
-            JSONArray hostArray = new JSONObject(sb.toString()).optJSONArray("HOSTS");
-            for (int i = 0; i < hostArray.length(); i++) {
-                JSONObject host = (JSONObject) hostArray.get(i);
-                String[] roles = host.getString("hostRoles").split(",");
-                for (String role : roles) {
-                    System.out.println(getHadmin().addPrincByRole(host.getString("name"),
-                            role.toUpperCase()));
-                }
-            }
-        } catch (Exception e) {
-            throw new HasException("Failed to execute creating principals, because : " + e.getMessage());
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
deleted file mode 100644
index 98458ec..0000000
--- a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-
-import java.io.Console;
-import java.util.Scanner;
-
-public class DeletePrincipalCmd extends HadminCmd {
-
-    public static final String USAGE = "Usage: delete_principal <principal-name>\n"
-            + "\tExample:\n"
-            + "\t\tdelete_principal alice\n";
-
-    private Boolean force = false;
-
-    public DeletePrincipalCmd(LocalHasAdmin hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        if (items.length < 2) {
-            System.err.println(USAGE);
-            return;
-        }
-        String principal = items[items.length - 1];
-        String reply;
-        Console console = System.console();
-        String prompt = "Are you sure to delete the principal? (yes/no, YES/NO, y/n, Y/N) ";
-        if (console == null) {
-            System.out.println("Couldn't get Console instance, "
-                    + "maybe you're running this from within an IDE. "
-                    + "Use scanner to read password.");
-            Scanner scanner = new Scanner(System.in, "UTF-8");
-            reply = getReply(scanner, prompt);
-        } else {
-            reply = getReply(console, prompt);
-        }
-        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
-            getHadmin().deletePrincipal(principal);
-            System.out.println("Success to delete " + principal);
-        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
-            System.out.println("Principal \"" + principal + "\"  not deleted.");
-        } else {
-            System.err.println("Unknown request, fail to delete the principal.");
-            System.err.println(USAGE);
-        }
-    }
-
-    private String getReply(Scanner scanner, String prompt) {
-        System.out.println(prompt);
-        return scanner.nextLine().trim();
-    }
-
-    private String getReply(Console console, String prompt) {
-        console.printf(prompt);
-        String line = console.readLine();
-        return line;
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
deleted file mode 100644
index 66eb5cb..0000000
--- a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-
-public class DisableConfigureCmd extends HadminCmd {
-
-    public static final String USAGE = "Usage: enable_configure\n"
-            + "\tExample:\n"
-            + "\t\tenable\n";
-
-    public DisableConfigureCmd(LocalHasAdmin hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        getHadmin().setEnableOfConf("false");
-        System.out.println("Set conf disable.");
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
deleted file mode 100644
index f40a6c6..0000000
--- a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-
-public class EnableConfigureCmd extends HadminCmd {
-
-    public static final String USAGE = "Usage: enable_configure\n"
-            + "\tExample:\n"
-            + "\t\tenable\n";
-
-    public EnableConfigureCmd(LocalHasAdmin hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        getHadmin().setEnableOfConf("true");
-        System.out.println("Set conf enable.");
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
deleted file mode 100644
index c5b130c..0000000
--- a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-import org.apache.hadoop.has.server.web.HostRoleType;
-
-import java.io.File;
-
-public class ExportKeytabsCmd extends HadminCmd {
-    private static final String USAGE = "\nUsage: export_keytabs <host> [role]\n"
-            + "\tExample:\n"
-            + "\t\texport_keytabs host1 HDFS\n";
-
-    public ExportKeytabsCmd(LocalHasAdmin hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        if (items.length < 2) {
-            System.err.println(USAGE);
-            return;
-        }
-        String host = items[1];
-        if (items.length >= 3) {
-            exportKeytab(host, items[2]);
-            return;
-        }
-        for (HostRoleType r : HostRoleType.values()) {
-            exportKeytab(host, r.getName());
-        }
-    }
-
-    public void exportKeytab(String host, String role) throws HasException {
-        File keytab = new File(role + "-" + host + ".keytab");
-        getHadmin().getKeytabByHostAndRole(host, role, keytab);
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
deleted file mode 100644
index ebaf07f..0000000
--- a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
-
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-public class GetHostRolesCmd extends HadminCmd {
-    private static final String USAGE = "Usage: get_hostroles\n"
-            + "\tExample:\n"
-            + "\t\tget_hostroles\n";
-
-    public GetHostRolesCmd(LocalHasAdmin hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) {
-        getHadmin().getHostRoles();
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
deleted file mode 100644
index ed636c8..0000000
--- a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-import org.apache.kerby.kerberos.kerb.request.KrbIdentity;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
-
-import java.util.Map;
-
-public class GetPrincipalCmd extends HadminCmd {
-    private static final String USAGE = "Usage: getprinc principalName\n"
-        + "\tExample:\n"
-        + "\t\tgetprinc hello@TEST.COM\"\n";
-
-    public GetPrincipalCmd(LocalHasAdmin hadmin) {
-        super(hadmin);
-    }
-
-  @Override
-  public void execute(String[] items) {
-    if (items.length != 2) {
-      System.err.println(USAGE);
-      return;
-    }
-
-    String princName = items[items.length - 1];
-    KrbIdentity identity = null;
-    try {
-      identity = getHadmin().getPrincipal(princName);
-    } catch (HasException e) {
-      System.err.println("Fail to get principal: " + princName + ". " + e.getMessage());
-    }
-
-    if (identity == null) {
-      System.err.println(princName + " doesn't exist\n");
-      System.err.println(USAGE);
-      return;
-    }
-
-    Map<EncryptionType, EncryptionKey> key = identity.getKeys();
-
-    System.out.println(
-        "Principal: " + identity.getPrincipalName() + "\n"
-            + "Expiration data: " + identity.getExpireTime() + "\n"
-            + "Created time: "
-            + identity.getCreatedTime() + "\n"
-            + "KDC flags: " + identity.getKdcFlags() + "\n"
-            + "Key version: " + identity.getKeyVersion() + "\n"
-            + "Number of keys: " + key.size()
-    );
-
-    for (EncryptionType keyType : key.keySet()) {
-      System.out.println("key: " + keyType);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java
deleted file mode 100644
index 95ce59f..0000000
--- a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-
-public abstract class HadminCmd {
-
-    private LocalHasAdmin hadmin;
-
-    public HadminCmd(LocalHasAdmin hadmin) {
-        this.hadmin = hadmin;
-    }
-
-    protected LocalHasAdmin getHadmin() {
-        return hadmin;
-    }
-
-    /**
-     * Execute the hadmin cmd.
-     * @param input Input cmd to execute
-     */
-    public abstract void execute(String[] input) throws HasException;
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
deleted file mode 100644
index 99e05e2..0000000
--- a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-
-import java.io.File;
-import java.util.List;
-
-public class KeytabAddCmd extends HadminCmd {
-    private static final String USAGE =
-        "Usage: ktadd [-k[eytab] keytab] [-q] [-e keysaltlist] [-norandkey] [principal | -glob princ-exp] [...]";
-
-    private static final String DEFAULT_KEYTAB_FILE_LOCATION = "/etc/krb5.keytab";
-
-    public KeytabAddCmd(LocalHasAdmin hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) {
-
-        String principal = null;
-        String keytabFileLocation = null;
-        Boolean glob = false;
-
-        //Since commands[0] is ktadd, the initial index is 1.
-        int index = 1;
-        while (index < items.length) {
-            String command = items[index];
-            if (command.equals("-k")) {
-                index++;
-                if (index >= items.length) {
-                    System.err.println(USAGE);
-                    return;
-                }
-                keytabFileLocation = items[index].trim();
-
-            } else if (command.equals("-glob")) {
-                glob = true;
-            } else if (!command.startsWith("-")) {
-                principal = command;
-            }
-            index++;
-        }
-
-        if (keytabFileLocation == null) {
-            keytabFileLocation = DEFAULT_KEYTAB_FILE_LOCATION;
-        }
-        File keytabFile = new File(keytabFileLocation);
-
-        if (principal == null) {
-            System.out.println((glob ? "princ-exp" : "principal") + " not specified!");
-            System.err.println(USAGE);
-            return;
-        }
-
-        try {
-            if (glob) {
-                List<String> principals = getHadmin().getPrincipals(principal);
-                if (principals.size() != 0) {
-                    getHadmin().exportKeytab(keytabFile, principals);
-                }
-            } else {
-                getHadmin().exportKeytab(keytabFile, principal);
-            }
-            System.out.println("Principal export to keytab file : " + keytabFile + " successful .");
-        } catch (HasException e) {
-            System.err.println("Principal \"" + principal + "\" fail to add entry to keytab."
-                    + e.getMessage());
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
deleted file mode 100644
index ef9e7f7..0000000
--- a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-
-import java.util.List;
-
-public class ListPrincipalsCmd extends HadminCmd {
-    private static final String USAGE = "Usage: list_principals [expression]\n"
-            + "\t'expression' is a shell-style glob expression that can contain the wild-card characters ?, *, and []."
-            + "\tExample:\n"
-            + "\t\tlist_principals [expression]\n";
-
-    public ListPrincipalsCmd(LocalHasAdmin hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        if (items.length > 2) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        List<String> principalLists = null;
-
-        if (items.length == 1) {
-            principalLists = getHadmin().getPrincipals();
-        } else {
-            //have expression
-            String exp = items[1];
-            principalLists = getHadmin().getPrincipals(exp);
-        }
-
-        if (principalLists.size() == 0 || principalLists.size() == 1 && principalLists.get(0).isEmpty()) {
-            return;
-        } else {
-            System.out.println("Principals are listed:");
-            for (int i = 0; i < principalLists.size(); i++) {
-                System.out.println(principalLists.get(i));
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
deleted file mode 100644
index 2c0ba20..0000000
--- a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-
-import java.io.Console;
-import java.util.Scanner;
-
-public class RenamePrincipalCmd extends HadminCmd {
-    public static final String USAGE = "Usage: rename_principal <old_principal_name>"
-            + " <new_principal_name>\n"
-            + "\tExample:\n"
-            + "\t\trename_principal alice bob\n";
-
-    public RenamePrincipalCmd(LocalHasAdmin hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        if (items.length < 3) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        String oldPrincipalName = items[items.length - 2];
-        String newPrincipalName = items[items.length - 1];
-
-        String reply;
-        Console console = System.console();
-        String prompt = "Are you sure to rename the principal? (yes/no, YES/NO, y/n, Y/N) ";
-        if (console == null) {
-            System.out.println("Couldn't get Console instance, "
-                    + "maybe you're running this from within an IDE. "
-                    + "Use scanner to read password.");
-            Scanner scanner = new Scanner(System.in, "UTF-8");
-            reply = getReply(scanner, prompt);
-        } else {
-            reply = getReply(console, prompt);
-        }
-        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
-            getHadmin().renamePrincipal(oldPrincipalName, newPrincipalName);
-            System.out.println("Success to rename principal : \"" + oldPrincipalName
-                + "\" to \"" + newPrincipalName + "\".");
-        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
-            System.out.println("Principal \"" + oldPrincipalName + "\"  not renamed.");
-        } else {
-            System.err.println("Unknown request, fail to rename the principal.");
-            System.err.println(USAGE);
-        }
-    }
-
-    private String getReply(Scanner scanner, String prompt) {
-        System.out.println(prompt);
-        return scanner.nextLine().trim();
-    }
-
-    private String getReply(Console console, String prompt) {
-        console.printf(prompt);
-        String line = console.readLine();
-        return line;
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/HadminLocalTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/HadminLocalTool.java b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/HadminLocalTool.java
new file mode 100644
index 0000000..d02129a
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/HadminLocalTool.java
@@ -0,0 +1,265 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.server.hadmin.local;
+
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+import org.apache.kerby.has.tool.server.hadmin.local.cmd.AddPrincipalCmd;
+import org.apache.kerby.has.tool.server.hadmin.local.cmd.AddPrincipalsCmd;
+import org.apache.kerby.has.tool.server.hadmin.local.cmd.DeletePrincipalCmd;
+import org.apache.kerby.has.tool.server.hadmin.local.cmd.DisableConfigureCmd;
+import org.apache.kerby.has.tool.server.hadmin.local.cmd.EnableConfigureCmd;
+import org.apache.kerby.has.tool.server.hadmin.local.cmd.ExportKeytabsCmd;
+import org.apache.kerby.has.tool.server.hadmin.local.cmd.GetHostRolesCmd;
+import org.apache.kerby.has.tool.server.hadmin.local.cmd.GetPrincipalCmd;
+import org.apache.kerby.has.tool.server.hadmin.local.cmd.HadminCmd;
+import org.apache.kerby.has.tool.server.hadmin.local.cmd.KeytabAddCmd;
+import org.apache.kerby.has.tool.server.hadmin.local.cmd.ListPrincipalsCmd;
+import org.apache.kerby.has.tool.server.hadmin.local.cmd.RenamePrincipalCmd;
+import org.apache.kerby.KOptions;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.KadminOption;
+import org.apache.kerby.kerberos.tool.kadmin.AuthUtil;
+import org.apache.kerby.kerberos.tool.kadmin.ToolUtil;
+import org.apache.kerby.util.OSUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosPrincipal;
+import javax.security.auth.login.LoginException;
+import java.io.File;
+import java.security.Principal;
+import java.util.Map;
+import java.util.Scanner;
+import java.util.Set;
+
+/**
+ * Ref. MIT kadmin cmd tool usage.
+ */
+public class HadminLocalTool {
+    private static final Logger LOG = LoggerFactory.getLogger(HadminLocalTool.class);
+    private static File confDir;
+
+    private static final String PROMPT = HadminLocalTool.class.getSimpleName() + ".local";
+    private static  final String USAGE = (OSUtil.isWindows()
+            ? "Usage: bin\\hadmin-local.cmd" : "Usage: sh bin/kadmin-local.sh")
+            + " <conf-dir> <-c cache_name>|<-k keytab>\n"
+            + "\tExample:\n"
+            + "\t\t"
+            + (OSUtil.isWindows()
+            ? "bin\\hadmin-local.cmd" : "sh bin/hadmin-local.sh")
+            + " conf -k admin.keytab\n";
+
+    private static void printUsage(String error) {
+        System.err.println(error + "\n");
+        System.err.println(USAGE);
+        System.exit(-1);
+    }
+
+    private static final String LEGAL_COMMANDS = "Available commands are: "
+        + "\n"
+        + "add_principal, addprinc\n"
+        + "                         Add principal\n"
+        + "delete_principal, delprinc\n"
+        + "                         Delete principal\n"
+        + "rename_principal, renprinc\n"
+        + "                         Rename principal\n"
+        + "get_principal, getprinc\n"
+        + "                         Get principal\n"
+        + "list_principals, listprincs\n"
+        + "                         List principals\n"
+        + "ktadd, xst\n"
+        + "                         Add entry(s) to a keytab\n"
+        + "get_hostroles, hostroles\n"
+        + "                         Get hostRoles\n"
+        + "export_keytabs, expkeytabs\n"
+        + "                         Export keytabs\n"
+        + "create_principals, creprincs\n"
+        + "                         Create principals\n"
+        + "enable_configure, enable\n"
+        + "                         Enable configure\n"
+        + "disable_configure, disable\n"
+        + "                         Disable configure\n";
+
+    private static void execute(LocalHasAdmin hadmin, String input) throws HasException {
+        // Omit the leading and trailing whitespace.
+        input = input.trim();
+        if (input.startsWith("cmd")) {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+
+        String[] items = input.split("\\s+");
+        String cmd = items[0];
+        HadminCmd executor;
+        if (cmd.startsWith("add_principal")
+            || cmd.startsWith("addprinc")) {
+            executor = new AddPrincipalCmd(hadmin);
+        } else if (cmd.startsWith("delete_principal")
+            || cmd.startsWith("delprinc")) {
+            executor = new DeletePrincipalCmd(hadmin);
+        } else if (cmd.startsWith("rename_principal")
+            || cmd.startsWith("renprinc")) {
+            executor = new RenamePrincipalCmd(hadmin);
+        } else if (cmd.startsWith("list_principals")
+            || cmd.startsWith("listprincs")) {
+            executor = new ListPrincipalsCmd(hadmin);
+        } else if (cmd.startsWith("ktadd")
+            || cmd.startsWith("xst")) {
+            executor = new KeytabAddCmd(hadmin);
+        } else if (cmd.startsWith("get_hostroles")
+            || cmd.startsWith("hostroles")) {
+            executor = new GetHostRolesCmd(hadmin);
+        } else if (cmd.startsWith("create_principals")
+            || cmd.startsWith("creprincs")) {
+            executor = new AddPrincipalsCmd(hadmin);
+        } else if (cmd.startsWith("export_keytabs")
+            || cmd.startsWith("expkeytabs")) {
+            executor = new ExportKeytabsCmd(hadmin);
+        } else if (cmd.startsWith("enable_configure")
+            || cmd.startsWith("enable")) {
+            executor = new EnableConfigureCmd(hadmin);
+        } else if (cmd.startsWith("disable_configure")
+            || cmd.startsWith("disable")) {
+            executor = new DisableConfigureCmd(hadmin);
+        }  else if (cmd.startsWith("get_principal")
+            || cmd.startsWith("getprinc")) {
+            executor = new GetPrincipalCmd(hadmin);
+        } else {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+        executor.execute(items);
+    }
+
+    private static File getConfDir(String[] args) {
+        String envDir;
+        confDir = new File(args[0]);
+        if (confDir == null || !confDir.exists()) {
+            try {
+                Map<String, String> mapEnv = System.getenv();
+                envDir = mapEnv.get("KRB5_KDC_DIR");
+            } catch (SecurityException e) {
+                envDir = null;
+            }
+            if (envDir != null) {
+                confDir = new File(envDir);
+            } else {
+                confDir = new File("/etc/kerby/"); // for Linux. TODO: fix for Win etc.
+            }
+
+            if (!confDir.exists()) {
+                throw new RuntimeException("Can not locate KDC backend directory "
+                        + confDir.getAbsolutePath());
+            }
+        }
+        LOG.info("Conf dir:" + confDir.getAbsolutePath());
+        return confDir;
+    }
+
+    public static void main(String[] args) {
+
+        if (args.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        LocalHasAdmin hadmin;
+        try {
+            hadmin = new LocalHasAdmin(getConfDir(args));
+        } catch (KrbException e) {
+            System.err.println("Failed to init HasAdmin due to " + e.getMessage());
+            return;
+        }
+
+        KOptions kOptions = ToolUtil.parseOptions(args, 1, args.length - 1);
+        if (kOptions == null) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        String hadminPrincipal = hadmin.getHadminPrincipal();
+        Subject subject = null;
+        if (kOptions.contains(KadminOption.CCACHE)) {
+            File ccFile = kOptions.getFileOption(KadminOption.CCACHE);
+            if (ccFile == null || !ccFile.exists()) {
+                printUsage("Need the valid credentials cache file.");
+                return;
+            }
+            try {
+                subject = AuthUtil.loginUsingTicketCache(hadminPrincipal, ccFile);
+            } catch (LoginException e) {
+                System.err.println("Could not login with: " + hadminPrincipal
+                    + e.getMessage());
+                return;
+            }
+        } else if (kOptions.contains(KadminOption.K)) {
+            File keyTabFile = new File(kOptions.getStringOption(KadminOption.K));
+            if (keyTabFile == null || !keyTabFile.exists()) {
+                printUsage("Need the valid keytab file.");
+                return;
+            }
+            try {
+                subject = AuthUtil.loginUsingKeytab(hadminPrincipal, keyTabFile);
+            } catch (LoginException e) {
+                System.err.println("Could not login with: " + hadminPrincipal
+                    + e.getMessage());
+                return;
+            }
+        } else {
+            printUsage("No credentials cache file or keytab file for authentication.");
+        }
+        if (subject != null) {
+            Principal adminPrincipal = new KerberosPrincipal(hadminPrincipal);
+            Set<Principal> princSet = subject.getPrincipals();
+            if (princSet == null || princSet.isEmpty()) {
+                printUsage("The principals in subject is empty.");
+                return;
+            }
+            if (princSet.contains(adminPrincipal)) {
+                System.out.println("Login successful for user: " + hadminPrincipal);
+            } else {
+                printUsage("Login failure for " + hadminPrincipal);
+                return;
+            }
+        } else {
+            printUsage("The subject is null, login failure for " + hadminPrincipal);
+            return;
+        }
+        System.out.println("enter \"cmd\" to see legal commands.");
+        System.out.print(PROMPT + ": ");
+
+        try (Scanner scanner = new Scanner(System.in, "UTF-8")) {
+            String input = scanner.nextLine();
+
+            while (!(input.equals("quit") || input.equals("exit")
+                    || input.equals("q"))) {
+                try {
+                    execute(hadmin, input);
+                } catch (HasException e) {
+                    System.err.println(e.getMessage());
+                }
+                System.out.print(PROMPT + ": ");
+                input = scanner.nextLine();
+            }
+        }
+    }
+}
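
[Editor's note] For reference, a minimal sketch (not part of this commit) of how a further subcommand would plug into the HadminCmd pattern under the relocated org.apache.kerby.has package. The class name CountPrincipalsCmd and its behavior are invented for illustration; LocalHasAdmin, HasException, and getPrincipals() are used exactly as in the commands shown in this diff.

    package org.apache.kerby.has.tool.server.hadmin.local.cmd;

    import org.apache.kerby.has.common.HasException;
    import org.apache.kerby.has.server.admin.LocalHasAdmin;

    /**
     * Illustrative only: a hypothetical subcommand following the same
     * pattern as the commands added in this commit.
     */
    public class CountPrincipalsCmd extends HadminCmd {

        private static final String USAGE = "Usage: count_principals\n"
                + "\tExample:\n"
                + "\t\tcount_principals\n";

        public CountPrincipalsCmd(LocalHasAdmin hadmin) {
            super(hadmin);
        }

        @Override
        public void execute(String[] items) throws HasException {
            if (items.length != 1) {
                System.err.println(USAGE);
                return;
            }
            // Same LocalHasAdmin call that ListPrincipalsCmd uses above.
            System.out.println("Number of principals: " + getHadmin().getPrincipals().size());
        }
    }

Wiring it in would only require one more branch in HadminLocalTool.execute(), dispatching on a prefix such as "count_principals".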

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
new file mode 100644
index 0000000..2fca203
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
@@ -0,0 +1,61 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.server.hadmin.local.cmd;
+
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+
+public class AddPrincipalCmd extends HadminCmd {
+
+    public static final String USAGE = "Usage: add_principal [options] <principal-name>\n"
+            + "\toptions are:\n"
+            + "\t\t[-randkey]\n"
+            + "\t\t[-pw password]"
+            + "\tExample:\n"
+            + "\t\tadd_principal -pw mypassword alice\n";
+
+    public AddPrincipalCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        String clientPrincipal = items[items.length - 1];
+        if (!items[1].startsWith("-")) {
+            getHadmin().addPrincipal(clientPrincipal);
+        } else if (items[1].startsWith("-randkey")) {
+            getHadmin().addPrincipal(clientPrincipal);
+        } else if (items[1].startsWith("-pw")) {
+            String password = items[2];
+            getHadmin().addPrincipal(clientPrincipal, password);
+        } else {
+            System.err.println("add_principal cmd format error.");
+            System.err.println(USAGE);
+            return;
+        }
+        System.out.println("Success to add principal :" + clientPrincipal);
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
new file mode 100644
index 0000000..2dc22fa
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
@@ -0,0 +1,78 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.server.hadmin.local.cmd;
+
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+
+public class AddPrincipalsCmd extends HadminCmd {
+    private static final Logger LOG = LoggerFactory.getLogger(AddPrincipalsCmd.class);
+
+    private static final String USAGE = "\nUsage: create_principals [hostRoles-file]\n"
+            + "\t'hostRoles-file' is a file with a hostRoles json string like:\n"
+            + "\t\t{HOSTS: [ {\"name\":\"host1\",\"hostRoles\":\"HDFS\"}, "
+            + "{\"name\":\"host2\",\"hostRoles\":\"HDFS,HBASE\"} ] }\n"
+            + "\tExample:\n"
+            + "\t\tcreate_principals hostroles.txt\n";
+
+    public AddPrincipalsCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length != 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        File hostRoles = new File(items[1]);
+        if (!hostRoles.exists()) {
+            throw new HasException("HostRoles file is not exists.");
+        }
+        try {
+            BufferedReader reader = new BufferedReader(new FileReader(hostRoles));
+            StringBuilder sb = new StringBuilder();
+            String tempString;
+            while ((tempString = reader.readLine()) != null) {
+                sb.append(tempString);
+            }
+            JSONArray hostArray = new JSONObject(sb.toString()).optJSONArray("HOSTS");
+            for (int i = 0; i < hostArray.length(); i++) {
+                JSONObject host = (JSONObject) hostArray.get(i);
+                String[] roles = host.getString("hostRoles").split(",");
+                for (String role : roles) {
+                    System.out.println(getHadmin().addPrincByRole(host.getString("name"),
+                            role.toUpperCase()));
+                }
+            }
+        } catch (Exception e) {
+            throw new HasException("Failed to execute creating principals, because : " + e.getMessage());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
new file mode 100644
index 0000000..3245a48
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
@@ -0,0 +1,80 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.server.hadmin.local.cmd;
+
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+
+import java.io.Console;
+import java.util.Scanner;
+
+public class DeletePrincipalCmd extends HadminCmd {
+
+    public static final String USAGE = "Usage: delete_principal <principal-name>\n"
+            + "\tExample:\n"
+            + "\t\tdelete_principal alice\n";
+
+    private Boolean force = false;
+
+    public DeletePrincipalCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+        String principal = items[items.length - 1];
+        String reply;
+        Console console = System.console();
+        String prompt = "Are you sure you want to delete the principal? (yes/no, YES/NO, y/n, Y/N) ";
+        if (console == null) {
+            System.out.println("Couldn't get Console instance, "
+                    + "maybe you're running this from within an IDE. "
+                    + "Using Scanner to read input.");
+            Scanner scanner = new Scanner(System.in, "UTF-8");
+            reply = getReply(scanner, prompt);
+        } else {
+            reply = getReply(console, prompt);
+        }
+        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
+            getHadmin().deletePrincipal(principal);
+            System.out.println("Successfully deleted " + principal);
+        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
+            System.out.println("Principal \"" + principal + "\" not deleted.");
+        } else {
+            System.err.println("Unknown response, the principal was not deleted.");
+            System.err.println(USAGE);
+        }
+    }
+
+    private String getReply(Scanner scanner, String prompt) {
+        System.out.println(prompt);
+        return scanner.nextLine().trim();
+    }
+
+    private String getReply(Console console, String prompt) {
+        console.printf(prompt);
+        String line = console.readLine();
+        return line;
+    }
+}

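A sketch of the interactive confirmation performed by delete_principal (the principal name is illustrative; the prompt and messages are taken from the code above):

    delete_principal alice
    Are you sure you want to delete the principal? (yes/no, YES/NO, y/n, Y/N) y
    Successfully deleted alice
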
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
new file mode 100644
index 0000000..35acb20
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
@@ -0,0 +1,40 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.server.hadmin.local.cmd;
+
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+
+public class DisableConfigureCmd extends HadminCmd {
+
+    public static final String USAGE = "Usage: disable_configure\n"
+            + "\tExample:\n"
+            + "\t\tdisable_configure\n";
+
+    public DisableConfigureCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        getHadmin().setEnableOfConf("false");
+        System.out.println("Set conf to disabled.");
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
new file mode 100644
index 0000000..fb27e3a
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
@@ -0,0 +1,40 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.server.hadmin.local.cmd;
+
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+
+public class EnableConfigureCmd extends HadminCmd {
+
+    public static final String USAGE = "Usage: enable_configure\n"
+            + "\tExample:\n"
+            + "\t\tenable_configure\n";
+
+    public EnableConfigureCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        getHadmin().setEnableOfConf("true");
+        System.out.println("Set conf to enabled.");
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
new file mode 100644
index 0000000..a868df3
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
@@ -0,0 +1,57 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.server.hadmin.local.cmd;
+
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+import org.apache.kerby.has.server.web.HostRoleType;
+
+import java.io.File;
+
+public class ExportKeytabsCmd extends HadminCmd {
+    private static final String USAGE = "\nUsage: export_keytabs <host> [role]\n"
+            + "\tExample:\n"
+            + "\t\texport_keytabs host1 HDFS\n";
+
+    public ExportKeytabsCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+        String host = items[1];
+        if (items.length >= 3) {
+            exportKeytab(host, items[2]);
+            return;
+        }
+        for (HostRoleType r : HostRoleType.values()) {
+            exportKeytab(host, r.getName());
+        }
+    }
+
+    public void exportKeytab(String host, String role) throws HasException {
+        File keytab = new File(role + "-" + host + ".keytab");
+        getHadmin().getKeytabByHostAndRole(host, role, keytab);
+    }
+}

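As a usage sketch for export_keytabs (the host name is illustrative): passing an explicit role exports a single keytab named <role>-<host>.keytab in the working directory, while omitting the role exports one keytab per defined HostRoleType:

    export_keytabs host1 HDFS    # writes HDFS-host1.keytab
    export_keytabs host1         # writes one <ROLE>-host1.keytab per role
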
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
new file mode 100644
index 0000000..142f74f
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
@@ -0,0 +1,36 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.server.hadmin.local.cmd;
+
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+public class GetHostRolesCmd extends HadminCmd {
+    private static final String USAGE = "Usage: get_hostroles\n"
+            + "\tExample:\n"
+            + "\t\tget_hostroles\n";
+
+    public GetHostRolesCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) {
+        getHadmin().getHostRoles();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
new file mode 100644
index 0000000..22be3bb
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
@@ -0,0 +1,76 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.server.hadmin.local.cmd;
+
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+import org.apache.kerby.kerberos.kerb.request.KrbIdentity;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+
+import java.util.Map;
+
+public class GetPrincipalCmd extends HadminCmd {
+    private static final String USAGE = "Usage: getprinc <principal-name>\n"
+        + "\tExample:\n"
+        + "\t\tgetprinc hello@TEST.COM\n";
+
+    public GetPrincipalCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) {
+        if (items.length != 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        String princName = items[items.length - 1];
+        KrbIdentity identity = null;
+        try {
+            identity = getHadmin().getPrincipal(princName);
+        } catch (HasException e) {
+            System.err.println("Failed to get principal: " + princName + ". " + e.getMessage());
+        }
+
+        if (identity == null) {
+            System.err.println(princName + " doesn't exist\n");
+            System.err.println(USAGE);
+            return;
+        }
+
+        Map<EncryptionType, EncryptionKey> keys = identity.getKeys();
+
+        System.out.println(
+            "Principal: " + identity.getPrincipalName() + "\n"
+                + "Expiration time: " + identity.getExpireTime() + "\n"
+                + "Created time: "
+                + identity.getCreatedTime() + "\n"
+                + "KDC flags: " + identity.getKdcFlags() + "\n"
+                + "Key version: " + identity.getKeyVersion() + "\n"
+                + "Number of keys: " + keys.size()
+        );
+
+        for (EncryptionType keyType : keys.keySet()) {
+            System.out.println("key: " + keyType);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/HadminCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/HadminCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/HadminCmd.java
new file mode 100644
index 0000000..891453f
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/HadminCmd.java
@@ -0,0 +1,42 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.server.hadmin.local.cmd;
+
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+
+public abstract class HadminCmd {
+
+    private LocalHasAdmin hadmin;
+
+    public HadminCmd(LocalHasAdmin hadmin) {
+        this.hadmin = hadmin;
+    }
+
+    protected LocalHasAdmin getHadmin() {
+        return hadmin;
+    }
+
+    /**
+     * Execute the hadmin cmd.
+     * @param input Input cmd to execute
+     */
+    public abstract void execute(String[] input) throws HasException;
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
new file mode 100644
index 0000000..bdc42d8
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
@@ -0,0 +1,91 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.server.hadmin.local.cmd;
+
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+
+import java.io.File;
+import java.util.List;
+
+public class KeytabAddCmd extends HadminCmd {
+    private static final String USAGE =
+        "Usage: ktadd [-k[eytab] keytab] [-q] [-e keysaltlist] [-norandkey] [principal | -glob princ-exp] [...]";
+
+    private static final String DEFAULT_KEYTAB_FILE_LOCATION = "/etc/krb5.keytab";
+
+    public KeytabAddCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) {
+
+        String principal = null;
+        String keytabFileLocation = null;
+        Boolean glob = false;
+
+        //Since commands[0] is ktadd, the initial index is 1.
+        int index = 1;
+        while (index < items.length) {
+            String command = items[index];
+            if (command.equals("-k")) {
+                index++;
+                if (index >= items.length) {
+                    System.err.println(USAGE);
+                    return;
+                }
+                keytabFileLocation = items[index].trim();
+
+            } else if (command.equals("-glob")) {
+                glob = true;
+            } else if (!command.startsWith("-")) {
+                principal = command;
+            }
+            index++;
+        }
+
+        if (keytabFileLocation == null) {
+            keytabFileLocation = DEFAULT_KEYTAB_FILE_LOCATION;
+        }
+        File keytabFile = new File(keytabFileLocation);
+
+        if (principal == null) {
+            System.err.println((glob ? "princ-exp" : "principal") + " not specified!");
+            System.err.println(USAGE);
+            return;
+        }
+
+        try {
+            if (glob) {
+                List<String> principals = getHadmin().getPrincipals(principal);
+                if (principals.size() != 0) {
+                    getHadmin().exportKeytab(keytabFile, principals);
+                }
+            } else {
+                getHadmin().exportKeytab(keytabFile, principal);
+            }
+            System.out.println("Principal(s) exported to keytab file " + keytabFile + " successfully.");
+        } catch (HasException e) {
+            System.err.println("Failed to add principal \"" + principal + "\" to keytab: "
+                    + e.getMessage());
+        }
+    }
+}

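A brief usage sketch for ktadd as implemented above (principal names and keytab paths are illustrative). Only -k and -glob are actually parsed; when -k is omitted the keytab defaults to /etc/krb5.keytab, and -glob is assumed to take the same shell-style expression syntax as list_principals:

    ktadd -k /tmp/hdfs.keytab hdfs/host1@EXAMPLE.COM
    ktadd -k /tmp/all-hdfs.keytab -glob hdfs*
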
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
new file mode 100644
index 0000000..43661f2
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
@@ -0,0 +1,63 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.server.hadmin.local.cmd;
+
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+
+import java.util.List;
+
+public class ListPrincipalsCmd extends HadminCmd {
+    private static final String USAGE = "Usage: list_principals [expression]\n"
+            + "\t'expression' is a shell-style glob expression that can contain the wild-card characters ?, *, and [].\n"
+            + "\tExample:\n"
+            + "\t\tlist_principals [expression]\n";
+
+    public ListPrincipalsCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length > 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        List<String> principalLists = null;
+
+        if (items.length == 1) {
+            principalLists = getHadmin().getPrincipals();
+        } else {
+            //have expression
+            String exp = items[1];
+            principalLists = getHadmin().getPrincipals(exp);
+        }
+
+        if (principalLists.size() == 0 || principalLists.size() == 1 && principalLists.get(0).isEmpty()) {
+            return;
+        } else {
+            System.out.println("Listing principals:");
+            for (int i = 0; i < principalLists.size(); i++) {
+                System.out.println(principalLists.get(i));
+            }
+        }
+    }
+}

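A short usage sketch for list_principals (the expression is illustrative):

    list_principals           # list every principal in the backend
    list_principals hdfs*     # list principals matching a shell-style glob
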
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
new file mode 100644
index 0000000..e28b2c2
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/kerby/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
@@ -0,0 +1,82 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.server.hadmin.local.cmd;
+
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+
+import java.io.Console;
+import java.util.Scanner;
+
+public class RenamePrincipalCmd extends HadminCmd {
+    public static final String USAGE = "Usage: rename_principal <old_principal_name>"
+            + " <new_principal_name>\n"
+            + "\tExample:\n"
+            + "\t\trename_principal alice bob\n";
+
+    public RenamePrincipalCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 3) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        String oldPrincipalName = items[items.length - 2];
+        String newPrincipalName = items[items.length - 1];
+
+        String reply;
+        Console console = System.console();
+        String prompt = "Are you sure you want to rename the principal? (yes/no, YES/NO, y/n, Y/N) ";
+        if (console == null) {
+            System.out.println("Couldn't get Console instance, "
+                    + "maybe you're running this from within an IDE. "
+                    + "Using Scanner to read input.");
+            Scanner scanner = new Scanner(System.in, "UTF-8");
+            reply = getReply(scanner, prompt);
+        } else {
+            reply = getReply(console, prompt);
+        }
+        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
+            getHadmin().renamePrincipal(oldPrincipalName, newPrincipalName);
+            System.out.println("Successfully renamed principal \"" + oldPrincipalName
+                + "\" to \"" + newPrincipalName + "\".");
+        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
+            System.out.println("Principal \"" + oldPrincipalName + "\" not renamed.");
+        } else {
+            System.err.println("Unknown response, the principal was not renamed.");
+            System.err.println(USAGE);
+        }
+    }
+
+    private String getReply(Scanner scanner, String prompt) {
+        System.out.println(prompt);
+        return scanner.nextLine().trim();
+    }
+
+    private String getReply(Console console, String prompt) {
+        console.printf(prompt);
+        String line = console.readLine();
+        return line;
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-tool/pom.xml b/has/has-tool/pom.xml
index a43041a..7a65184 100644
--- a/has/has-tool/pom.xml
+++ b/has/has-tool/pom.xml
@@ -4,7 +4,7 @@
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
   <parent>
-    <groupId>org.apache.hadoop</groupId>
+    <groupId>org.apache.kerby</groupId>
     <artifactId>has-project</artifactId>
     <version>1.0.0-SNAPSHOT</version>
   </parent>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/pom.xml
----------------------------------------------------------------------
diff --git a/has/pom.xml b/has/pom.xml
index ad80711..d9e41b1 100644
--- a/has/pom.xml
+++ b/has/pom.xml
@@ -25,7 +25,7 @@
   </parent>
 
   <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.hadoop</groupId>
+  <groupId>org.apache.kerby</groupId>
   <artifactId>has-project</artifactId>
   <version>1.0.0-SNAPSHOT</version>
   <description>Hadoop Authentication Server</description>
@@ -46,6 +46,7 @@
     <kerby.version>1.1.0-SNAPSHOT</kerby.version>
     <slf4j.version>1.7.25</slf4j.version>
     <buildtools.dir>${basedir}/build-tools</buildtools.dir>
+    <hadoop.version>3.0.0-alpha2</hadoop.version>
   </properties>
 
   <build>

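With the groupId switched to org.apache.kerby, downstream modules would declare the HAS artifacts as in the following sketch (has-client shown as an example; the zookeeper support pom later in this commit follows the same pattern):

    <dependency>
      <groupId>org.apache.kerby</groupId>
      <artifactId>has-client</artifactId>
      <version>1.0.0-SNAPSHOT</version>
    </dependency>
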
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/supports/hadoop/hadoop-2.7.2.patch
----------------------------------------------------------------------
diff --git a/has/supports/hadoop/hadoop-2.7.2.patch b/has/supports/hadoop/hadoop-2.7.2.patch
index 336a83d..85c7c3f 100644
--- a/has/supports/hadoop/hadoop-2.7.2.patch
+++ b/has/supports/hadoop/hadoop-2.7.2.patch
@@ -7,7 +7,7 @@ index aa3c2c7..e4f1fd2 100644
        <scope>test</scope>
      </dependency>
 +    <dependency>
-+      <groupId>org.apache.hadoop</groupId>
++      <groupId>org.apache.kerby</groupId>
 +      <artifactId>has-client</artifactId>
 +     <version>1.0.0-SNAPSHOT</version>
 +    </dependency>
@@ -24,7 +24,7 @@ index f7f5f63..80b7aca 100644
        ? "com.ibm.security.auth.module.Krb5LoginModule"
 -      : "com.sun.security.auth.module.Krb5LoginModule";
 +//      : "com.sun.security.auth.module.Krb5LoginModule";
-+      :"org.apache.hadoop.has.client.HasLoginModule";
++      :"org.apache.kerby.has.client.HasLoginModule";
    }
    
    public static Oid getOidInstance(String oidName) 

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
----------------------------------------------------------------------
diff --git a/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch b/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
index bef04b4..f00cec5 100644
--- a/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
+++ b/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
@@ -8,7 +8,7 @@ index ca0fce2..b43476d 100644
        ? "com.ibm.security.auth.module.Krb5LoginModule"
 -      : "com.sun.security.auth.module.Krb5LoginModule";
 +//      : "com.sun.security.auth.module.Krb5LoginModule";
-+      :"org.apache.hadoop.has.client.HasLoginModule";
++      :"org.apache.kerby.has.client.HasLoginModule";
    }
    
    public static Oid getOidInstance(String oidName) 

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/supports/zookeeper/conf/jaas.conf
----------------------------------------------------------------------
diff --git a/has/supports/zookeeper/conf/jaas.conf b/has/supports/zookeeper/conf/jaas.conf
index 62db69a..570009f 100644
--- a/has/supports/zookeeper/conf/jaas.conf
+++ b/has/supports/zookeeper/conf/jaas.conf
@@ -8,6 +8,6 @@
   };
 
 Client {
-  org.apache.hadoop.has.client.HasLoginModule required
+  org.apache.kerby.has.client.HasLoginModule required
   useTgtTicket=true;
 };

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/supports/zookeeper/pom.xml
----------------------------------------------------------------------
diff --git a/has/supports/zookeeper/pom.xml b/has/supports/zookeeper/pom.xml
index d2cdc13..e78f71a 100644
--- a/has/supports/zookeeper/pom.xml
+++ b/has/supports/zookeeper/pom.xml
@@ -5,7 +5,7 @@
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
-    <groupId>org.apache.hadoop</groupId>
+    <groupId>org.apache.kerby</groupId>
     <artifactId>has-project</artifactId>
     <version>1.0.0-SNAPSHOT</version>
   </parent>
@@ -16,7 +16,7 @@
 
   <dependencies>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.kerby</groupId>
       <artifactId>has-client</artifactId>
       <version>${project.version}</version>
     </dependency>


[08/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java
deleted file mode 100644
index 82bb129..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-@InterfaceAudience.Public
-@InterfaceStability.Stable
-public enum HostRoleType {
-    HDFS("HDFS", new String[]{"HTTP", "hdfs"}),
-    YARN("YARN", new String[]{"yarn"}),
-    MAPRED("MAPRED", new String[]{"mapred"}),
-    HBASE("HBASE", new String[]{"hbase"}),
-    ZOOKEEPER("ZOOKEEPER", new String[]{"zookeeper"}),
-    SPARK("SPARK", new String[]{"spark"}),
-    HIVE("HIVE", new String[]{"hive"}),
-    OOZIE("OOZIE", new String[]{"oozie"}),
-    HUE("HUE", new String[]{"hue"});
-
-    private String name;
-    private String[] princs;
-
-    HostRoleType(String name, String[] princs) {
-        this.name = name;
-        this.princs = princs;
-    }
-
-    public String[] getPrincs() {
-        return princs;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public void setName(String name) {
-        this.name = name;
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java
deleted file mode 100644
index bd0a1ca..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.has.server.web;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-
-/** 
- * This class contains constants for configuration keys and default values
- * used in hdfs.
- */
-@InterfaceAudience.Private
-public class WebConfigKey {
-
-  public static final int HAS_HTTP_PORT_DEFAULT = 9870;
-  public static final String HAS_HTTP_HOST_DEFAULT = "0.0.0.0";
-  public static final String HAS_HTTP_ADDRESS_KEY = "has.http-address";
-  public static final String HAS_HTTP_ADDRESS_DEFAULT = HAS_HTTP_HOST_DEFAULT + ":" + HAS_HTTP_PORT_DEFAULT;
-
-  public static final String HAS_HTTPS_BIND_HOST_KEY = "has.https-bind-host";
-  public static final int HAS_HTTPS_PORT_DEFAULT = 9871;
-  public static final String HAS_HTTPS_HOST_DEFAULT = "0.0.0.0";
-  public static final String HAS_HTTPS_ADDRESS_KEY = "has.https-address";
-  public static final String HAS_HTTPS_ADDRESS_DEFAULT = HAS_HTTPS_HOST_DEFAULT + ":" + HAS_HTTPS_PORT_DEFAULT;
-  public static final String HAS_HTTP_POLICY_KEY = "has.http.policy";
-  public static final String HAS_HTTP_POLICY_DEFAULT = HttpConfig.Policy.HTTPS_ONLY.name();
-
-  public static final String HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY = "has.https.server.keystore.resource";
-  public static final String HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT = "ssl-server.xml";
-  public static final String HAS_SERVER_HTTPS_KEYPASSWORD_KEY = "ssl.server.keystore.keypassword";
-  public static final String HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY = "ssl.server.keystore.password";
-  public static final String HAS_SERVER_HTTPS_KEYSTORE_LOCATION_KEY = "ssl.server.keystore.location";
-  public static final String HAS_SERVER_HTTPS_TRUSTSTORE_LOCATION_KEY = "ssl.server.truststore.location";
-  public static final String HAS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY = "ssl.server.truststore.password";
-  public static final String HAS_CLIENT_HTTPS_NEED_AUTH_KEY = "has.client.https.need-auth";
-  public static final boolean HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT = false;
-
-  public static final String HAS_AUTHENTICATION_FILTER_KEY = "has.web.authentication.filter";
-  public static final String HAS_AUTHENTICATION_FILTER_DEFAULT = AuthenticationFilter.class.getName();
-
-  public static final String HAS_AUTHENTICATION_FILTER_AUTH_TYPE = "has.authentication.filter.auth.type";
-  public static final String HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY = "has.authentication.kerberos.principal";
-  public static final String HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY = "has.authentication.kerberos.keytab";
-  public static final String HAS_AUTHENTICATION_KERBEROS_NAME_RULES = "has.authentication.kerberos.name.rules";
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java
deleted file mode 100644
index 3e5f832..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java
+++ /dev/null
@@ -1,348 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.has.server.web;
-
-import org.apache.hadoop.HadoopIllegalArgumentException;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.HasServer;
-import org.apache.hadoop.has.server.web.rest.HasApi;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.http.HttpServer2;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.servlet.ServletContext;
-import java.io.File;
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.URI;
-import java.util.HashMap;
-import java.util.Map;
-
-public class WebServer {
-    public static final Logger LOG = LoggerFactory.getLogger(WebServer.class);
-
-    private HttpServer2 httpServer;
-    private final HasConfig conf;
-
-    private InetSocketAddress httpAddress;
-    private InetSocketAddress httpsAddress;
-
-    protected static final String HAS_SERVER_ATTRIBUTE_KEY = "hasserver";
-
-    public WebServer(HasConfig conf) {
-        this.conf = conf;
-    }
-
-    public HasConfig getConf() {
-        return conf;
-    }
-
-    private void init() {
-
-        final String pathSpec = "/has/v1/*";
-
-        // add has packages
-        httpServer.addJerseyResourcePackage(HasApi.class
-                .getPackage().getName(),
-            pathSpec);
-    }
-
-    public void defineFilter() {
-        String authType = conf.getString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE);
-        if (authType.equals("kerberos")) {
-            // add authentication filter for webhdfs
-            final String className = conf.getString(
-                WebConfigKey.HAS_AUTHENTICATION_FILTER_KEY,
-                WebConfigKey.HAS_AUTHENTICATION_FILTER_DEFAULT);
-
-            final String name = className;
-
-            Map<String, String> params = getAuthFilterParams(conf);
-
-            String adminPathSpec = "/has/v1/admin/*";
-            HttpServer2.defineFilter(httpServer.getWebAppContext(), name, className,
-                params, new String[]{adminPathSpec});
-            HttpServer2.LOG.info("Added filter '" + name + "' (class=" + className
-                + ")");
-        }
-    }
-
-    public void defineConfFilter() {
-        String confFilterName = ConfFilter.class.getName();
-        String confPath = "/has/v1/conf/*";
-        HttpServer2.defineFilter(httpServer.getWebAppContext(), confFilterName, confFilterName,
-                getAuthFilterParams(conf), new String[]{confPath});
-        HttpServer2.LOG.info("Added filter '" + confFilterName + "' (class=" + confFilterName
-                + ")");
-    }
-
-    private Map<String, String> getAuthFilterParams(HasConfig conf) {
-        Map<String, String> params = new HashMap<String, String>();
-
-        String authType = conf.getString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE);
-        if (authType != null && !authType.isEmpty()) {
-            params.put(AuthenticationFilter.AUTH_TYPE, authType);
-        }
-        String principal = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
-        if (principal != null && !principal.isEmpty()) {
-            try {
-                principal = SecurityUtil.getServerPrincipal(principal,
-                    getHttpsAddress().getHostName());
-            } catch (IOException e) {
-                LOG.warn("Errors occurred when get server principal. " + e.getMessage());
-            }
-            params.put(KerberosAuthenticationHandler.PRINCIPAL, principal);
-        }
-        String keytab = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
-        if (keytab != null && !keytab.isEmpty()) {
-            params.put(KerberosAuthenticationHandler.KEYTAB, keytab);
-        }
-        String rule = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_NAME_RULES);
-        if (rule != null && !rule.isEmpty()) {
-            params.put(KerberosAuthenticationHandler.NAME_RULES, rule);
-        } else {
-            params.put(KerberosAuthenticationHandler.NAME_RULES, "DEFAULT");
-        }
-        return params;
-    }
-
-    public InetSocketAddress getBindAddress() {
-        if (httpAddress != null) {
-            return httpAddress;
-        } else if (httpsAddress != null) {
-            return httpsAddress;
-        } else {
-            return null;
-        }
-    }
-
-    /**
-     * for information related to the different configuration options and
-     * Http Policy is decided.
-     */
-    public void start() throws HasException {
-
-        HttpConfig.Policy policy = getHttpPolicy(conf);
-
-        final String bindHost =
-            conf.getString(WebConfigKey.HAS_HTTPS_BIND_HOST_KEY);
-        InetSocketAddress httpAddr = null;
-        if (policy.isHttpEnabled()) {
-            final String httpAddrString = conf.getString(
-                WebConfigKey.HAS_HTTP_ADDRESS_KEY,
-                WebConfigKey.HAS_HTTP_ADDRESS_DEFAULT);
-            httpAddr = NetUtils.createSocketAddr(httpAddrString);
-            if (bindHost != null && !bindHost.isEmpty()) {
-                httpAddr = new InetSocketAddress(bindHost, httpAddr.getPort());
-            }
-            LOG.info("Get the http address: " + httpAddr);
-        }
-
-        InetSocketAddress httpsAddr = null;
-        if (policy.isHttpsEnabled()) {
-            final String httpsAddrString = conf.getString(
-                WebConfigKey.HAS_HTTPS_ADDRESS_KEY,
-                WebConfigKey.HAS_HTTPS_ADDRESS_DEFAULT);
-            httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
-
-            if (bindHost != null && !bindHost.isEmpty()) {
-                httpsAddr = new InetSocketAddress(bindHost, httpsAddr.getPort());
-            }
-            LOG.info("Get the https address: " + httpsAddr);
-        }
-
-        HttpServer2.Builder builder = httpServerTemplateForHAS(conf, httpAddr, httpsAddr, "has");
-
-        try {
-            httpServer = builder.build();
-        } catch (IOException e) {
-            throw new HasException("Errors occurred when building http server. " + e.getMessage());
-        }
-
-        init();
-
-        try {
-            httpServer.start();
-        } catch (IOException e) {
-            throw new HasException("Errors occurred when starting http server. " + e.getMessage());
-        }
-        int connIdx = 0;
-        if (policy.isHttpEnabled()) {
-            httpAddress = httpServer.getConnectorAddress(connIdx++);
-            conf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY,
-                NetUtils.getHostPortString(httpAddress));
-        }
-
-        if (policy.isHttpsEnabled()) {
-            httpsAddress = httpServer.getConnectorAddress(connIdx);
-            conf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY,
-                NetUtils.getHostPortString(httpsAddress));
-        }
-    }
-
-    public void setWebServerAttribute(HasServer hasServer) {
-        httpServer.setAttribute(HAS_SERVER_ATTRIBUTE_KEY, hasServer);
-    }
-
-    public static HasServer getHasServerFromContext(ServletContext context) {
-        return (HasServer) context.getAttribute(HAS_SERVER_ATTRIBUTE_KEY);
-    }
-
-    /**
-     * Get http policy.
-     */
-    public HttpConfig.Policy getHttpPolicy(HasConfig conf) {
-        String policyStr = conf.getString(WebConfigKey.HAS_HTTP_POLICY_KEY,
-            WebConfigKey.HAS_HTTP_POLICY_DEFAULT);
-        HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
-        if (policy == null) {
-            throw new HadoopIllegalArgumentException("Unregonized value '"
-                + policyStr + "' for " + WebConfigKey.HAS_HTTP_POLICY_KEY);
-        }
-
-        conf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY, policy.name());
-        return policy;
-    }
-
-    /**
-     * Return a HttpServer.Builder that the ssm can use to
-     * initialize their HTTP / HTTPS server.
-     */
-    public HttpServer2.Builder httpServerTemplateForHAS(
-        HasConfig conf, final InetSocketAddress httpAddr, final InetSocketAddress httpsAddr,
-        String name) throws HasException {
-        HttpConfig.Policy policy = getHttpPolicy(conf);
-
-        HttpServer2.Builder builder = new HttpServer2.Builder().setName(name);
-
-        if (policy.isHttpEnabled()) {
-            if (httpAddr.getPort() == 0) {
-                builder.setFindPort(true);
-            }
-
-            URI uri = URI.create("http://" + NetUtils.getHostPortString(httpAddr));
-            builder.addEndpoint(uri);
-            LOG.info("Starting Web-server for " + name + " at: " + uri);
-        }
-
-        if (policy.isHttpsEnabled() && httpsAddr != null) {
-            HasConfig sslConf = loadSslConfiguration(conf);
-            loadSslConfToHttpServerBuilder(builder, sslConf);
-
-            if (httpsAddr.getPort() == 0) {
-                builder.setFindPort(true);
-            }
-
-            URI uri = URI.create("https://" + NetUtils.getHostPortString(httpsAddr));
-            builder.addEndpoint(uri);
-            LOG.info("Starting Web-server for " + name + " at: " + uri);
-        }
-
-        return builder;
-    }
-
-    /**
-     * Load HTTPS-related configuration.
-     */
-    public HasConfig loadSslConfiguration(HasConfig conf) throws HasException {
-        HasConfig sslConf = new HasConfig();
-
-        String sslConfigString = conf.getString(
-            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT);
-        LOG.info("Get the ssl config file: " + sslConfigString);
-        try {
-            sslConf.addIniConfig(new File(sslConfigString));
-        } catch (IOException e) {
-            throw new HasException("Errors occurred when adding config. " + e.getMessage());
-        }
-
-        final String[] reqSslProps = {
-            WebConfigKey.HAS_SERVER_HTTPS_TRUSTSTORE_LOCATION_KEY,
-            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_LOCATION_KEY,
-            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY,
-            WebConfigKey.HAS_SERVER_HTTPS_KEYPASSWORD_KEY
-        };
-
-        // Check if the required properties are included
-        for (String sslProp : reqSslProps) {
-            if (sslConf.getString(sslProp) == null) {
-                LOG.warn("SSL config " + sslProp + " is missing. If "
-                    + WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY
-                    + " is specified, make sure it is a relative path");
-            }
-        }
-
-        boolean requireClientAuth = conf.getBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY,
-            WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
-        sslConf.setBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY, requireClientAuth);
-        return sslConf;
-    }
-
-    public HttpServer2.Builder loadSslConfToHttpServerBuilder(HttpServer2.Builder builder,
-                                                              HasConfig sslConf) {
-        return builder
-            .needsClientAuth(
-                sslConf.getBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY,
-                    WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT))
-            .keyPassword(getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_KEYPASSWORD_KEY))
-            .keyStore(sslConf.getString("ssl.server.keystore.location"),
-                getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY),
-                sslConf.getString("ssl.server.keystore.type", "jks"))
-            .trustStore(sslConf.getString("ssl.server.truststore.location"),
-                getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY),
-                sslConf.getString("ssl.server.truststore.type", "jks"))
-            .excludeCiphers(
-                sslConf.getString("ssl.server.exclude.cipher.list"));
-    }
-
-    /**
-     * Leverages the Configuration.getPassword method to attempt to get
-     * passwords from the CredentialProvider API before falling back to
-     * clear text in config - if falling back is allowed.
-     *
-     * @param conf  Configuration instance
-     * @param alias name of the credential to retreive
-     * @return String credential value or null
-     */
-    public String getPassword(HasConfig conf, String alias) {
-
-        return conf.getString(alias);
-    }
-
-    public void stop() throws Exception {
-        if (httpServer != null) {
-            httpServer.stop();
-        }
-    }
-
-    public InetSocketAddress getHttpAddress() {
-        return httpAddress;
-    }
-
-    public InetSocketAddress getHttpsAddress() {
-        return httpsAddress;
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java
deleted file mode 100644
index a6fc4ce..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest;
-
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.HasServer;
-import org.apache.hadoop.has.server.web.WebServer;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
-import org.apache.kerby.kerberos.kerb.server.KdcUtil;
-
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.File;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * HAS configure web methods implementation.
- */
-@Path("/conf")
-public class ConfApi {
-
-    @Context
-    private ServletContext context;
-
-    @Context
-    private HttpServletRequest httpRequest;
-
-    /**
-     * Set HAS plugin.
-     *
-     * @param plugin HAS plugin name
-     * @return Response
-     */
-    @PUT
-    @Path("/setplugin")
-    @Consumes({MediaType.TEXT_PLAIN})
-    @Produces({MediaType.TEXT_PLAIN})
-    public Response setPlugin(@QueryParam("plugin") final String plugin) {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            WebServer.LOG.info("Set HAS plugin...");
-            try {
-                Map<String, String> values = new HashMap<>();
-                File hasConfFile = new File(hasServer.getConfDir(), "has-server.conf");
-                HasConfig hasConfig = HasUtil.getHasConfig(hasConfFile);
-                if (hasConfig != null) {
-                    String defaultValue = hasConfig.getPluginName();
-                    values.put(defaultValue, plugin);
-                } else {
-                    throw new RuntimeException("has-server.conf not found. ");
-                }
-                hasServer.updateConfFile("has-server.conf", values);
-            } catch (IOException | HasException e) {
-                throw new RuntimeException("Failed to set HAS plugin. ", e);
-            }
-            WebServer.LOG.info("HAS plugin set successfully.");
-
-            return Response.status(200).entity("HAS plugin set successfully.\n").build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Config HAS server backend.
-     *
-     * @param backendType type of backend
-     * @param dir         json dir
-     * @param driver      mysql JDBC connector driver
-     * @param url         mysql JDBC connector url
-     * @param user        mysql user name
-     * @param password    mysql password of user
-     * @return Response
-     */
-    @PUT
-    @Path("/configkdcbackend")
-    @Consumes({MediaType.APPLICATION_JSON})
-    @Produces({MediaType.TEXT_PLAIN})
-    public Response configKdcBackend(
-        @QueryParam("backendType") final String backendType,
-        @QueryParam("dir") @DefaultValue("/tmp/has/jsonbackend") final String dir,
-        @QueryParam("driver") @DefaultValue("com.mysql.jdbc.Driver") final String driver,
-        @QueryParam("url") @DefaultValue("jdbc:mysql://127.0.0.1:3306/mysqlbackend") final String url,
-        @QueryParam("user") @DefaultValue("root") final String user,
-        @QueryParam("password") @DefaultValue("passwd") final String password) {
-
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            if ("json".equals(backendType)) {
-                WebServer.LOG.info("Set Json backend...");
-                try {
-                    Map<String, String> values = new HashMap<>();
-                    values.put("_JAR_", "org.apache.kerby.kerberos.kdc.identitybackend.JsonIdentityBackend");
-                    values.put("#_JSON_DIR_", "backend.json.dir = " + dir);
-                    values.put("#_MYSQL_\n", "");
-                    hasServer.updateConfFile("backend.conf", values);
-                } catch (IOException | HasException e) {
-                    throw new RuntimeException("Failed to set Json backend. ", e);
-                }
-                WebServer.LOG.info("Json backend set successfully.");
-
-                return Response.status(200).entity("Json backend set successfully.\n").build();
-            } else if ("mysql".equals(backendType)) {
-                WebServer.LOG.info("Set MySQL backend...");
-                try {
-                    String mysqlConfig = "mysql_driver = " + driver + "\nmysql_url = " + url
-                        + "\nmysql_user = " + user + "\nmysql_password = " + password;
-                    Map<String, String> values = new HashMap<>();
-                    values.put("_JAR_", "org.apache.hadoop.has.server.kdc.MySQLIdentityBackend");
-                    values.put("#_JSON_DIR_\n", "");
-                    values.put("#_MYSQL_", mysqlConfig);
-                    hasServer.updateConfFile("backend.conf", values);
-                } catch (IOException | HasException e) {
-                    throw new RuntimeException("Failed to set MySQL backend. ", e);
-                }
-                WebServer.LOG.info("MySQL backend set successfully.");
-
-                return Response.status(200).entity("MySQL backend set successfully.\n").build();
-            } else {
-                return Response.status(400).entity(backendType + " is not supported.\n").build();
-            }
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Config HAS server KDC.
-     * @param port KDC port to set
-     * @param realm KDC realm to set
-     * @param host KDC host to set
-     * @return Response
-     */
-    @PUT
-    @Path("/configkdc")
-    @Consumes({MediaType.TEXT_PLAIN})
-    @Produces({MediaType.TEXT_PLAIN})
-    public Response configKdc(
-        @QueryParam("port") final int port,
-        @QueryParam("realm") final String realm,
-        @QueryParam("host") final String host) {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            WebServer.LOG.info("Config HAS server KDC...");
-            try {
-                BackendConfig backendConfig = KdcUtil.getBackendConfig(hasServer.getConfDir());
-                String backendJar = backendConfig.getString("kdc_identity_backend");
-                if (backendJar.equals("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend")) {
-                    hasServer.configMySQLKdc(backendConfig, realm, port, host, hasServer);
-                } else {
-                    Map<String, String> values = new HashMap<>();
-                    values.put("_HOST_", host);
-                    values.put("_PORT_", String.valueOf(port));
-                    values.put("_REALM_", realm);
-                    hasServer.updateConfFile("kdc.conf", values);
-                    String kdc = "\t\tkdc = " + host + ":" + port;
-                    values.put("_KDCS_", kdc);
-                    values.put("_UDP_LIMIT_", "4096");
-                    hasServer.updateConfFile("krb5.conf", values);
-                }
-            } catch (IOException | HasException | KrbException e) {
-                throw new RuntimeException("Failed to config HAS KDC. ", e);
-            }
-            WebServer.LOG.info("HAS server KDC set successfully.");
-            return Response.status(200).entity("HAS server KDC set successfully.\n").build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-}
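
For context, the removed ConfApi above exposed plain JAX-RS resources that only answered over HTTPS. As a minimal sketch of how such an endpoint might be exercised from a standalone client, the snippet below issues the PUT /conf/setplugin request; the host, port, base path, plugin name, and TLS trust setup are illustrative assumptions, not part of the deleted code.

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class SetPluginSketch {
        public static void main(String[] args) throws Exception {
            // Assumed server address and context path; adjust to the real deployment.
            // A self-signed server certificate would also need a matching trust store.
            URL url = new URL("https://has.example.com:8092/conf/setplugin?plugin=MyPlugin");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("PUT");
            conn.setRequestProperty("Content-Type", "text/plain");
            conn.setDoOutput(true);
            conn.getOutputStream().close();  // empty request body
            System.out.println(conn.getResponseCode() + " " + conn.getResponseMessage());
        }
    }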

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HadminApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HadminApi.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HadminApi.java
deleted file mode 100644
index 1b84639..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HadminApi.java
+++ /dev/null
@@ -1,455 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.HasServer;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-import org.apache.hadoop.has.server.web.HostRoleType;
-import org.apache.hadoop.has.server.web.WebServer;
-import org.apache.hadoop.has.server.web.rest.param.HostParam;
-import org.apache.hadoop.has.server.web.rest.param.HostRoleParam;
-import org.apache.hadoop.has.server.web.rest.param.PasswordParam;
-import org.apache.hadoop.has.server.web.rest.param.PrincipalParam;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.BufferedInputStream;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
-
-/**
- * HAS HasAdmin web methods implementation.
- */
-@Path("/admin")
-public class HadminApi {
-
-    @Context
-    private ServletContext context;
-
-    @Context
-    private HttpServletRequest httpRequest;
-
-    private void compressFile(File file, ZipOutputStream out, String basedir) {
-        if (!file.exists()) {
-            return;
-        }
-        try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file))) {
-            ZipEntry entry = new ZipEntry(basedir + file.getName());
-            out.putNextEntry(entry);
-            int count;
-            byte[] data = new byte[8192];
-            while ((count = bis.read(data, 0, 8192)) != -1) {
-                out.write(data, 0, count);
-            }
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    /**
-     * @param host Hadoop node
-     * @param role Hadoop role
-     * @return Response
-     */
-    @GET
-    @Path("/exportkeytabs")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response exportKeytabs(@QueryParam(HostParam.NAME) @DefaultValue(HostParam.DEFAULT)
-                                  final HostParam host,
-                                  @QueryParam(HostRoleParam.NAME) @DefaultValue(HostRoleParam.DEFAULT)
-                                  final HostRoleParam role) {
-        if (httpRequest.isSecure()) {
-            WebServer.LOG.info("Request to export keytabs.");
-            LocalHasAdmin hasAdmin = null;
-            HasServer hasServer = null;
-            try {
-                hasServer = WebServer.getHasServerFromContext(context);
-                hasAdmin = new LocalHasAdmin(hasServer);
-            } catch (KrbException e) {
-                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
-            }
-            if (host.getValue() != null) {
-                if (role.getValue() != null) {
-                    try {
-                        File file = hasAdmin.getKeytabByHostAndRole(host.getValue(), role.getValue());
-                        WebServer.LOG.info("Create keytab file for the " + role.getValue()
-                            + " for " + host.getValue());
-                        return Response.ok(file).header("Content-Disposition",
-                            "attachment; filename=" + role.getValue() + "-"
-                                + host.getValue() + ".keytab").build();
-                    } catch (HasException e) {
-                        WebServer.LOG.error("Failed to export keytab File because : " + e.getMessage());
-                    }
-                } else {
-                    //export keytabs zip file
-                    List<File> keytabs = new ArrayList<>();
-                    for (HostRoleType r : HostRoleType.values()) {
-                        try {
-                            keytabs.add(hasAdmin.getKeytabByHostAndRole(host.getValue(), r.getName()));
-                            WebServer.LOG.info("Create keytab file for the " + r.getName()
-                                + " for " + host.getValue());
-                        } catch (HasException e) {
-                            WebServer.LOG.info("Failed to export keytab File because : " + e.getMessage());
-                        }
-                    }
-                    if (keytabs.size() < 1) {
-                        return Response.serverError().build();
-                    }
-                    File path = new File(hasServer.getWorkDir(), "tmp/zip/"
-                        + System.currentTimeMillis());
-                    path.mkdirs();
-                    File keytabZip = new File(path, "keytab.zip");
-                    if (keytabZip.exists()) {
-                        keytabZip.delete();
-                    }
-                    try {
-                        ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(keytabZip));
-                        for (File keytab : keytabs) {
-                            compressFile(keytab, zos, "");
-                        }
-                        zos.close();
-                        WebServer.LOG.info("Success to create the keytab.zip.");
-                        return Response.ok(keytabZip).header("Content-Disposition",
-                            "attachment; filename=keytab.zip").build();
-                    } catch (Exception e) {
-                        WebServer.LOG.error("Failed to create the keytab.zip,because : " + e.getMessage());
-                    }
-                }
-            }
-            return Response.serverError().build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Export a single keytab file.
-     *
-     * @param principal principal name to export keytab file
-     * @return Response
-     */
-    @GET
-    @Path("/exportkeytab")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response exportKeytab(@QueryParam("principal") final String principal) {
-        if (httpRequest.isSecure()) {
-            LocalHasAdmin hasAdmin = null;
-            WebServer.LOG.info("Exporting keytab file for " + principal + "...");
-            try {
-                HasServer hasServer = WebServer.getHasServerFromContext(context);
-                hasAdmin = new LocalHasAdmin(hasServer);
-            } catch (KrbException e) {
-                WebServer.LOG.error("Failed to create local hadmin." + e.getMessage());
-            }
-            WebServer.LOG.info("Create keytab file for " + principal + " successfully.");
-            if (principal != null) {
-                try {
-                    File path = new File("/tmp/" + System.currentTimeMillis());
-                    if (path.mkdirs()) {
-                        File keytabFile = new File(path, principal + ".keytab");
-                        hasAdmin.exportKeytab(keytabFile, principal);
-                        WebServer.LOG.info("Created keytab file for " + principal + " successfully.");
-                        return Response.ok(keytabFile).header("Content-Disposition", "attachment; filename="
-                            + keytabFile.getName()).build();
-                    }
-                } catch (HasException e) {
-                    WebServer.LOG.error("Failed to export keytab. " + e.toString());
-                    return Response.serverError().build();
-                }
-            }
-            return Response.serverError().build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    @PUT
-    @Path("/setconf")
-    @Produces(MediaType.APPLICATION_JSON)
-    public Response setConf(@QueryParam("isEnable") String isEnable) {
-        if (httpRequest.isSecure()) {
-            WebServer.LOG.info("Request to admin/setconf.");
-            final HasServer hasServer = WebServer.getHasServerFromContext(
-                context);
-            File hasConf = new File(hasServer.getConfDir(), "has-server.conf");
-            if (!hasConf.exists()) {
-                WebServer.LOG.error("has-server.conf is not exists.");
-                return Response.serverError().entity("has-server.conf is not exists.")
-                    .build();
-            }
-            String result = "";
-            if (isEnable.equals("true")) {
-                result = "enable";
-            } else if (isEnable.equals("false")) {
-                result = "disable";
-            } else {
-                WebServer.LOG.error("Value of isEnable is error.");
-                return Response.serverError().entity("Value of isEnable is error.")
-                    .build();
-            }
-            try {
-                HasUtil.setEnableConf(hasConf, isEnable);
-            } catch (Exception e) {
-                WebServer.LOG.error(e.getMessage());
-                return Response.serverError().entity(e.getMessage()).build();
-            }
-            return Response.ok("Set conf to " + result).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    @GET
-    @Path("/getprincipals")
-    @Produces(MediaType.APPLICATION_JSON)
-    public Response getprincipals(@QueryParam("exp") String exp) {
-        if (httpRequest.isSecure()) {
-            WebServer.LOG.info("Request to get principals.");
-            JSONObject result = new JSONObject();
-            String msg;
-            LocalHasAdmin hasAdmin = null;
-            try {
-                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
-            } catch (KrbException e) {
-                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
-            }
-            try {
-                JSONArray principals = new JSONArray();
-                List<String> princList = hasAdmin.getPrincipals(exp);
-                for (String princ : princList) {
-                    principals.put(princ);
-                }
-                WebServer.LOG.info("Success to get principals with JSON.");
-                result.put("result", "success");
-                result.put("msg", principals.toString());
-                return Response.ok(result.toString()).build();
-            } catch (Exception e) {
-                WebServer.LOG.error("Failed to get principals,because : " + e.getMessage());
-                msg = "Failed to get principals,because : " + e.getMessage();
-            }
-            try {
-                result.put("result", "error");
-                result.put("msg", msg);
-            } catch (JSONException e) {
-                WebServer.LOG.error(e.getMessage());
-            }
-            return Response.ok(result.toString()).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Add principal by name and password.
-     *
-     * @param principal principal name.
-     * @param password  principal password
-     * @return Response
-     */
-    @POST
-    @Path("/addprincipal")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response addprincipal(@QueryParam(PrincipalParam.NAME) @DefaultValue(PrincipalParam.DEFAULT)
-                                 final PrincipalParam principal,
-                                 @QueryParam(PasswordParam.NAME) @DefaultValue(PasswordParam.DEFAULT)
-                                 final PasswordParam password) {
-        if (httpRequest.isSecure()) {
-            WebServer.LOG.info("Request to add the principal named " + principal.getValue());
-            LocalHasAdmin hasAdmin = null;
-            try {
-                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
-            } catch (KrbException e) {
-                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
-            }
-            JSONObject result = new JSONObject();
-            String msg = "Add principal successfully.";
-            try {
-                hasAdmin.addPrincipal(principal.getValue(), password.getValue());
-                result.put("result", "success");
-                result.put("msg", msg);
-                return Response.ok(result.toString()).build();
-            } catch (Exception e) {
-                WebServer.LOG.error("Failed to add " + principal + " principal, because: " + e.getMessage());
-                msg = "Failed to add " + principal + " principal, because: " + e.getMessage();
-            }
-            try {
-                result.put("result", "error");
-                result.put("msg", msg);
-            } catch (JSONException e) {
-                WebServer.LOG.error(e.getMessage());
-            }
-            return Response.ok(result.toString()).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    @POST
-    @Path("/renameprincipal")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response renamePrincipal(@QueryParam("oldprincipal") String oldPrincipal,
-                                    @QueryParam("newprincipal") String newPrincipal) {
-        if (httpRequest.isSecure()) {
-            WebServer.LOG.info("Request to rename " + oldPrincipal + " to " + newPrincipal);
-            JSONObject result = new JSONObject();
-            String msg = "Rename principal successfully.";
-            if (oldPrincipal != null && newPrincipal != null) {
-                LocalHasAdmin hasAdmin = null;
-                try {
-                    hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
-                } catch (KrbException e) {
-                    WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
-                }
-                try {
-                    hasAdmin.renamePrincipal(oldPrincipal, newPrincipal);
-                    result.put("result", "success");
-                    result.put("msg", msg);
-                    return Response.ok(result.toString()).build();
-                } catch (Exception e) {
-                    WebServer.LOG.error("Failed to rename principal " + oldPrincipal + " to "
-                        + newPrincipal + ",because: " + e.getMessage());
-                    msg = "Failed to rename principal " + oldPrincipal + " to "
-                        + newPrincipal + ",because: " + e.getMessage();
-                }
-            } else {
-                WebServer.LOG.error("Value of old or new principal is null.");
-                msg = "Value of old or new principal is null.";
-            }
-            try {
-                result.put("result", "error");
-                result.put("msg", msg);
-            } catch (JSONException e) {
-                WebServer.LOG.error(e.getMessage());
-            }
-            return Response.ok(result.toString()).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Delete principal by name.
-     *
-     * @param principal principal like "admin" or "admin@HADOOP.COM".
-     * @return Response
-     */
-    @DELETE
-    @Path("/deleteprincipal")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response deleteprincipal(@QueryParam(PrincipalParam.NAME) @DefaultValue(PrincipalParam.DEFAULT)
-                                    final PrincipalParam principal) {
-        if (httpRequest.isSecure()) {
-            WebServer.LOG.info("Request to delete the principal named " + principal.getValue());
-            JSONObject result = new JSONObject();
-            String msg = "Delete principal successfully.";
-            LocalHasAdmin hasAdmin = null;
-            try {
-                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
-            } catch (KrbException e) {
-                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
-            }
-            try {
-                hasAdmin.deletePrincipal(principal.getValue());
-                result.put("result", "success");
-                result.put("msg", msg);
-                return Response.ok(result.toString()).build();
-            } catch (Exception e) {
-                WebServer.LOG.error("Failed to delete the principal named " + principal.getValue()
-                    + ",because : " + e.getMessage());
-                msg = "Failed to delete the principal named " + principal.getValue()
-                    + ",because : " + e.getMessage();
-            }
-            try {
-                result.put("result", "error");
-                result.put("msg", msg);
-            } catch (JSONException e) {
-                WebServer.LOG.error(e.getMessage());
-            }
-            return Response.ok(result.toString()).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    @PUT
-    @Path("/createprincipals")
-    @Consumes(MediaType.APPLICATION_JSON)
-    @Produces(MediaType.APPLICATION_JSON)
-    public Response createprincipals(@Context HttpServletRequest request) {
-        if (httpRequest.isSecure()) {
-            LocalHasAdmin hasAdmin = null;
-            try {
-                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
-            } catch (KrbException e) {
-                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
-            }
-            JSONObject result = new JSONObject();
-            String msg = "";
-            try {
-                StringBuilder data = new StringBuilder();
-                BufferedReader br = new BufferedReader(new InputStreamReader(request.getInputStream()));
-                String s;
-                while ((s = br.readLine()) != null) {
-                    data.append(s);
-                }
-                br.close();
-                WebServer.LOG.info("Request to create principals by JSON : \n" + data.toString());
-                JSONArray hostArray = new JSONObject(data.toString()).optJSONArray("HOSTS");
-                for (int i = 0; i < hostArray.length(); i++) {
-                    JSONObject host = (JSONObject) hostArray.get(i);
-                    String[] roles = host.getString("hostRoles").split(",");
-                    for (String role : roles) {
-                        msg += hasAdmin.addPrincByRole(host.getString("name"), role.toUpperCase());
-                    }
-                }
-                result.put("result", "success");
-                result.put("msg", msg);
-                return Response.ok(result.toString()).build();
-            } catch (Exception e) {
-                WebServer.LOG.error("Failed to create principals,because : " + e.getMessage());
-                msg = "Failed to create principals,because : " + e.getMessage();
-            }
-            try {
-                result.put("result", "error");
-                result.put("msg", msg);
-            } catch (JSONException e) {
-                WebServer.LOG.error(e.getMessage());
-            }
-            return Response.ok(result.toString()).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-}
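
The removed HadminApi above also accepted a JSON body on PUT /admin/createprincipals. As a small sketch, the snippet below builds such a payload with the same Jettison classes the resource imported; the keys "HOSTS", "name", and "hostRoles" come from the deleted code, while the host name and role list are hypothetical sample values.

    import org.codehaus.jettison.json.JSONArray;
    import org.codehaus.jettison.json.JSONObject;

    public class CreatePrincipalsPayloadSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical host entry; the keys match what createprincipals() reads.
            JSONObject host = new JSONObject();
            host.put("name", "host1.example.com");
            host.put("hostRoles", "HDFS,YARN");
            JSONArray hosts = new JSONArray();
            hosts.put(host);
            JSONObject payload = new JSONObject();
            payload.put("HOSTS", hosts);
            System.out.println(payload.toString());
            // {"HOSTS":[{"name":"host1.example.com","hostRoles":"HDFS,YARN"}]}
        }
    }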

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HasApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HasApi.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HasApi.java
deleted file mode 100644
index a1eb958..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HasApi.java
+++ /dev/null
@@ -1,336 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.HasAuthenException;
-import org.apache.hadoop.has.server.HasServer;
-import org.apache.hadoop.has.server.HasServerPlugin;
-import org.apache.hadoop.has.server.HasServerPluginRegistry;
-import org.apache.hadoop.has.server.kdc.HasKdcHandler;
-import org.apache.hadoop.has.server.web.HostRoleType;
-import org.apache.hadoop.has.server.web.WebServer;
-import org.apache.hadoop.has.server.web.rest.param.AuthTokenParam;
-import org.apache.hadoop.has.server.web.rest.param.TypeParam;
-import org.apache.hadoop.http.JettyUtils;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.KrbRuntime;
-import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
-import org.apache.kerby.kerberos.kerb.provider.TokenDecoder;
-import org.apache.kerby.kerberos.kerb.server.KdcUtil;
-import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
-import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.GET;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.File;
-import java.io.IOException;
-import java.util.Map;
-import java.util.TreeMap;
-
-/**
- * HAS web methods implementation.
- */
-@Path("")
-public class HasApi {
-
-    @Context
-    private ServletContext context;
-
-    @Context
-    private HttpServletRequest httpRequest;
-
-    /**
-     * Get krb5.conf file.
-     *
-     * @return Response
-     */
-    @GET
-    @Path("/getkrb5conf")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response getKrb5Conf() {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            try {
-                BackendConfig backendConfig = KdcUtil.getBackendConfig(hasServer.getConfDir());
-                String backendJar = backendConfig.getString("kdc_identity_backend");
-                File conf;
-                if (backendJar.equals("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend")) {
-                    conf = hasServer.generateKrb5Conf();
-                } else {
-                    File confDir = hasServer.getConfDir();
-                    conf = new File(confDir, "krb5.conf");
-                }
-                return Response.ok(conf).header("Content-Disposition", "attachment; filename=krb5.conf").build();
-            } catch (KrbException | HasException e) {
-                throw new RuntimeException("Failed to get Krb5.conf. ", e);
-            }
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Get has-client.conf file.
-     *
-     * @return Response
-     */
-    @GET
-    @Path("/gethasconf")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response getHasConf() {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            try {
-                BackendConfig backendConfig = KdcUtil.getBackendConfig(hasServer.getConfDir());
-                String backendJar = backendConfig.getString("kdc_identity_backend");
-                File conf;
-                if (backendJar.equals("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend")) {
-                    conf = hasServer.generateHasConf();
-                } else {
-                    File confDir = hasServer.getConfDir();
-                    conf = new File(confDir, "has-server.conf");
-                }
-                return Response.ok(conf).header("Content-Disposition", "attachment; filename=has-client.conf").build();
-            } catch (IOException | KrbException | HasException e) {
-                throw new RuntimeException("Failed to get has-client.conf. ", e);
-            }
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Get CA file.
-     *
-     * @return Response
-     */
-    @GET
-    @Path("/getcert")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response getCert() {
-        final HasServer hasServer = WebServer.getHasServerFromContext(context);
-        String errMessage = null;
-        File cert = null;
-        try {
-            HasConfig hasConfig = HasUtil.getHasConfig(
-                new File(hasServer.getConfDir(), "has-server.conf"));
-            if (hasConfig != null) {
-                String certPath = hasConfig.getSslClientCert();
-                cert = new File(certPath);
-                if (!cert.exists()) {
-                    errMessage = "Cert file not found in HAS server.";
-                    WebServer.LOG.error("Cert file not found in HAS server.");
-                }
-            } else {
-                errMessage = "has-server.conf not found.";
-                WebServer.LOG.error("has-server.conf not found.");
-            }
-        } catch (HasException e) {
-            errMessage = "Failed to get cert file" + e.getMessage();
-            WebServer.LOG.error("Failed to get cert file" + e.getMessage());
-        }
-        if (errMessage == null) {
-            return Response.ok(cert).header("Content-Disposition",
-                "attachment;filename=" + cert.getName()).build();
-        } else {
-            return Response.status(Response.Status.NOT_FOUND).entity(errMessage).build();
-        }
-    }
-
-    @GET
-    @Path("/hostroles")
-    @Produces(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8)
-    public Response getRoles() {
-        if (httpRequest.isSecure()) {
-            JSONArray result = new JSONArray();
-            try {
-                for (HostRoleType role : HostRoleType.values()) {
-                    JSONObject jso = new JSONObject();
-                    jso.put("HostRole", role.getName());
-                    JSONArray jsa = new JSONArray();
-                    String[] princs = role.getPrincs();
-                    for (String princ : princs) {
-                        jsa.put(princ);
-                    }
-                    jso.put("PrincipalNames", jsa);
-                    result.put(jso);
-                }
-                return Response.ok(result.toString() + "\n").type(MediaType.APPLICATION_JSON).build();
-            } catch (Exception e) {
-                WebServer.LOG.error("Failed to get host roles." + e.getMessage());
-            }
-            return Response.serverError().build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    @GET
-    @Path("/kdcinit")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response kdcInit() {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            try {
-                File adminKeytab = hasServer.initKdcServer();
-                return Response.ok(adminKeytab).header("Content-Disposition",
-                    "attachment; filename=" + adminKeytab.getName()).build();
-            } catch (KrbException e) {
-                System.err.println("[ERROR] " + e.getMessage());
-            }
-            return Response.serverError().build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    @GET
-    @Path("/kdcstart")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response kdcStart() {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            JSONObject result = new JSONObject();
-            String msg = "Succeed in starting KDC server.";
-
-            try {
-                hasServer.startKdcServer();
-            } catch (HasException e) {
-                WebServer.LOG.error("Fail to start kdc server. " + e.getMessage());
-                msg = e.getMessage();
-            }
-            try {
-                result.put("result", "success");
-                result.put("msg", msg);
-                return Response.ok(result.toString()).build();
-            } catch (Exception e) {
-                WebServer.LOG.error(e.getMessage());
-                msg = e.getMessage();
-            }
-            try {
-                result.put("result", "error");
-                result.put("msg", msg);
-            } catch (JSONException e) {
-                WebServer.LOG.error(e.getMessage());
-            }
-            return Response.ok(result.toString()).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Handle HTTP PUT request.
-     */
-    @PUT
-    @Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8,
-        MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8})
-    public Response asRequest(
-        @QueryParam(TypeParam.NAME) @DefaultValue(TypeParam.DEFAULT)
-        final TypeParam type,
-        @QueryParam(AuthTokenParam.NAME) @DefaultValue(AuthTokenParam.DEFAULT)
-        final AuthTokenParam authToken
-    ) {
-        return asRequest(type.getValue(), authToken.getValue());
-    }
-
-    private Response asRequest(String type, String tokenStr) {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            String errMessage = null;
-            String js = null;
-            ObjectMapper mapper = new ObjectMapper();
-            final Map<String, Object> m = new TreeMap<String, Object>();
-
-            if (hasServer.getKdcServer() == null) {
-                errMessage = "Please start the has KDC server.";
-            } else if (!tokenStr.isEmpty() && tokenStr != null) {
-                HasKdcHandler kdcHandler = new HasKdcHandler(hasServer);
-
-                TokenDecoder tokenDecoder = KrbRuntime.getTokenProvider("JWT").createTokenDecoder();
-
-                AuthToken authToken = null;
-                try {
-                    authToken = tokenDecoder.decodeFromString(tokenStr);
-                } catch (IOException e) {
-                    errMessage = "Failed to decode the token string." + e.getMessage();
-                    WebServer.LOG.error(errMessage);
-                }
-                HasServerPlugin tokenPlugin = null;
-                try {
-                    tokenPlugin = HasServerPluginRegistry.createPlugin(type);
-                } catch (HasException e) {
-                    errMessage = "Fail to get the plugin: " + type + ". " + e.getMessage();
-                    WebServer.LOG.error(errMessage);
-                }
-                AuthToken verifiedAuthToken;
-                try {
-                    verifiedAuthToken = tokenPlugin.authenticate(authToken);
-                } catch (HasAuthenException e) {
-                    errMessage = "Failed to verify auth token: " + e.getMessage();
-                    WebServer.LOG.error(errMessage);
-                    verifiedAuthToken = null;
-                }
-
-                if (verifiedAuthToken != null) {
-                    KrbMessage asRep = kdcHandler.getResponse(verifiedAuthToken,
-                        (String) verifiedAuthToken.getAttributes().get("passPhrase"));
-
-                    Base64 base64 = new Base64(0);
-                    try {
-                        m.put("type", tokenPlugin.getLoginType());
-                        m.put("success", "true");
-                        m.put("krbMessage", base64.encodeToString(asRep.encode()));
-                    } catch (IOException e) {
-                        errMessage = "Failed to encode KrbMessage." + e.getMessage();
-                        WebServer.LOG.error(errMessage);
-                    }
-
-                }
-            } else {
-                errMessage = "The token string should not be empty.";
-                WebServer.LOG.error(errMessage);
-            }
-
-            if (errMessage != null) {
-                m.put("success", "false");
-                m.put("krbMessage", errMessage);
-            }
-            try {
-                js = mapper.writeValueAsString(m);
-            } catch (JsonProcessingException e) {
-                WebServer.LOG.error("Failed write values to string." + e.getMessage());
-            }
-            return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-}
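
For reference, HasApi.asRequest() above returns a JSON map whose "krbMessage" field carries a Base64-encoded AS-REP on success, or an error string otherwise. The snippet below is a minimal client-side sketch of unpacking that response with the same Jackson and commons-codec classes the server imported; the literal JSON body and the "RAM" plugin type are made-up sample values.

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.commons.codec.binary.Base64;

    public class AsResponseSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical response body shaped like the map built in asRequest().
            String body = "{\"success\":\"true\",\"type\":\"RAM\",\"krbMessage\":\"AAECAw==\"}";
            JsonNode node = new ObjectMapper().readTree(body);
            if ("true".equals(node.get("success").asText())) {
                byte[] asRep = Base64.decodeBase64(node.get("krbMessage").asText());
                // A real client would decode these bytes as a Kerberos message to obtain the TGT.
                System.out.println("AS-REP bytes: " + asRep.length);
            } else {
                System.err.println("Error: " + node.get("krbMessage").asText());
            }
        }
    }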

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/AuthTokenParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/AuthTokenParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/AuthTokenParam.java
deleted file mode 100644
index 1df0312..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/AuthTokenParam.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-public class AuthTokenParam extends StringParam {
-  /**
-   * Parameter name.
-   */
-  public static final String NAME = "authToken";
-  /**
-   * Default parameter value.
-   */
-  public static final String DEFAULT = "";
-
-  private static final StringParam.Domain DOMAIN = new StringParam.Domain(NAME, null);
-
-  /**
-   * Constructor.
-   *
-   * @param str a string representation of the parameter value.
-   */
-  public AuthTokenParam(final String str) {
-    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
-  }
-
-  @Override
-  public String getName() {
-    return NAME;
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/EnumParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/EnumParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/EnumParam.java
deleted file mode 100644
index 6852ca7..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/EnumParam.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-import org.apache.hadoop.util.StringUtils;
-
-import java.util.Arrays;
-
-abstract class EnumParam<E extends Enum<E>>
-    extends Param<E, EnumParam.Domain<E>> {
-  EnumParam(final Domain<E> domain, final E value) {
-    super(domain, value);
-  }
-
-  /**
-   * The domain of the parameter.
-   */
-  static final class Domain<E extends Enum<E>> extends Param.Domain<E> {
-    private final Class<E> enumClass;
-
-    Domain(String name, Class<E> enumClass) {
-      super(name);
-      this.enumClass = enumClass;
-    }
-
-    @Override
-    public String getDomain() {
-      return Arrays.asList(enumClass.getEnumConstants()).toString();
-    }
-
-    @Override
-    E parse(String str) {
-      return Enum.valueOf(enumClass, StringUtils.toUpperCase(str));
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostParam.java
deleted file mode 100644
index ee66ede..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostParam.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-public class HostParam extends StringParam {
-  /**
-   * Parameter name.
-   */
-  public static final String NAME = "host";
-  /**
-   * Default parameter value.
-   */
-  public static final String DEFAULT = "";
-
-  private static final Domain DOMAIN = new Domain(NAME, null);
-
-  /**
-   * Constructor.
-   *
-   * @param str a string representation of the parameter value.
-   */
-  public HostParam(final String str) {
-    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
-  }
-
-  @Override
-  public String getName() {
-    return NAME;
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostRoleParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostRoleParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostRoleParam.java
deleted file mode 100644
index 07e481f..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostRoleParam.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-public class HostRoleParam extends StringParam {
-  /**
-   * Parameter name.
-   */
-  public static final String NAME = "role";
-  /**
-   * Default parameter value.
-   */
-  public static final String DEFAULT = "";
-
-  private static final Domain DOMAIN = new Domain(NAME, null);
-
-  /**
-   * Constructor.
-   *
-   * @param str a string representation of the parameter value.
-   */
-  public HostRoleParam(final String str) {
-    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
-  }
-
-  @Override
-  public String getName() {
-    return NAME;
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/Param.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/Param.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/Param.java
deleted file mode 100644
index 5e43683..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/Param.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-import java.io.UnsupportedEncodingException;
-import java.net.URLEncoder;
-import java.util.Arrays;
-import java.util.Comparator;
-
-/**
- * Base class of parameters.
- */
-public abstract class Param<T, D extends Param.Domain<T>> {
-  static final String NULL = "null";
-
-  static final Comparator<Param<?, ?>> NAME_CMP = new Comparator<Param<?, ?>>() {
-    @Override
-    public int compare(Param<?, ?> left, Param<?, ?> right) {
-      return left.getName().compareTo(right.getName());
-    }
-  };
-
-  /** Convert the parameters to a sorted String.
-   *
-   * @param separator URI parameter separator character
-   * @param parameters parameters to encode into a string
-   * @return the encoded URI string
-   */
-  public static String toSortedString(final String separator,
-                                      final Param<?, ?>... parameters) {
-    Arrays.sort(parameters, NAME_CMP);
-    final StringBuilder b = new StringBuilder();
-    try {
-      for (Param<?, ?> p : parameters) {
-        if (p.getValue() != null) {
-          b.append(separator)
-              .append(URLEncoder.encode(p.getName(), "UTF-8"))
-              .append("=")
-              .append(URLEncoder.encode(p.getValueString(), "UTF-8"));
-        }
-      }
-    } catch (UnsupportedEncodingException e) {
-      // Sane systems know about UTF-8, so this should never happen.
-      throw new RuntimeException(e);
-    }
-    return b.toString();
-  }
-
-  /** The domain of the parameter. */
-  final D domain;
-  /** The actual parameter value. */
-  final T value;
-
-  Param(final D domain, final T value) {
-    this.domain = domain;
-    this.value = value;
-  }
-
-  /** @return the parameter value. */
-  public final T getValue() {
-    return value;
-  }
-
-  /** @return the parameter value as a string */
-  public abstract String getValueString();
-
-  /** @return the parameter name. */
-  public abstract String getName();
-
-  @Override
-  public String toString() {
-    return getName() + "=" + value;
-  }
-
-  /** Base class of parameter domains. */
-  abstract static class Domain<T> {
-    /** Parameter name. */
-    final String paramName;
-
-    Domain(final String paramName) {
-      this.paramName = paramName;
-    }
-
-    /** @return the parameter name. */
-    public final String getParamName() {
-      return paramName;
-    }
-
-    /** @return a string description of the domain of the parameter. */
-    public abstract String getDomain();
-
-    /** @return the parameter value represented by the string. */
-    abstract T parse(String str);
-
-    /** Parse the given string.
-     * @return the parameter value represented by the string.
-     */
-    public final T parse(final String varName, final String str) {
-      try {
-        return str != null && str.trim().length() > 0 ? parse(str) : null;
-      } catch (Exception e) {
-        throw new IllegalArgumentException("Failed to parse \"" + str
-            + "\" for the parameter " + varName
-            + ".  The value must be in the domain " + getDomain(), e);
-      }
-    }
-  }
-}
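
The Param hierarchy removed above closely resembles the WebHDFS parameter classes; concrete subclasses such as HostParam and HostRoleParam, shown earlier in this package, supply a name, a default, and a domain. Below is a short sketch of how Param.toSortedString() assembles a query string from them; the values are placeholders.

    import org.apache.hadoop.has.server.web.rest.param.HostParam;
    import org.apache.hadoop.has.server.web.rest.param.HostRoleParam;
    import org.apache.hadoop.has.server.web.rest.param.Param;

    public class ParamSketch {
        public static void main(String[] args) {
            // Placeholder values; parameter names sort alphabetically ("host" before "role").
            String query = Param.toSortedString("&",
                new HostParam("host1.example.com"),
                new HostRoleParam("HDFS"));
            System.out.println(query);
            // &host=host1.example.com&role=HDFS
        }
    }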

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PasswordParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PasswordParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PasswordParam.java
deleted file mode 100644
index 045cc96..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PasswordParam.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-public class PasswordParam extends StringParam {
-  /**
-   * Parameter name.
-   */
-  public static final String NAME = "password";
-  /**
-   * Default parameter value.
-   */
-  public static final String DEFAULT = "";
-
-  private static final Domain DOMAIN = new Domain(NAME, null);
-
-  /**
-   * Constructor.
-   *
-   * @param str a string representation of the parameter value.
-   */
-  public PasswordParam(final String str) {
-    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
-  }
-
-  @Override
-  public String getName() {
-    return NAME;
-  }
-}
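
PasswordParam (like the PrincipalParam and TypeParam classes that follow) treats a missing or empty query value as null rather than the empty string. A minimal usage sketch; the surrounding REST handler is assumed, only PasswordParam itself comes from this code:

    PasswordParam password = new PasswordParam("secret");
    password.getName();                  // "password"
    password.getValue();                 // "secret"
    new PasswordParam("").getValue();    // null, because DEFAULT ("") means "not supplied"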

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PrincipalParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PrincipalParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PrincipalParam.java
deleted file mode 100644
index cabca21..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PrincipalParam.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-public class PrincipalParam extends StringParam {
-  /**
-   * Parameter name.
-   */
-  public static final String NAME = "principal";
-  /**
-   * Default parameter value.
-   */
-  public static final String DEFAULT = "";
-
-  private static final Domain DOMAIN = new Domain(NAME, null);
-
-  /**
-   * Constructor.
-   *
-   * @param str a string representation of the parameter value.
-   */
-  public PrincipalParam(final String str) {
-    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
-  }
-
-  @Override
-  public String getName() {
-    return NAME;
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/StringParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/StringParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/StringParam.java
deleted file mode 100644
index b5eba07..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/StringParam.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-import java.util.regex.Pattern;
-
-/**
- * String parameter.
- */
-abstract class StringParam extends Param<String, StringParam.Domain> {
-  StringParam(final Domain domain, String str) {
-    super(domain, domain.parse(str));
-  }
-
-  /**
-   * @return the parameter value as a string
-   */
-  @Override
-  public String getValueString() {
-    return value;
-  }
-
-  /**
-   * The domain of the parameter.
-   */
-  static final class Domain extends Param.Domain<String> {
-    /**
-     * The pattern defining the domain; null means any string is accepted.
-     */
-    private final Pattern pattern;
-
-    Domain(final String paramName, final Pattern pattern) {
-      super(paramName);
-      this.pattern = pattern;
-    }
-
-    @Override
-    public String getDomain() {
-      return pattern == null ? "<String>" : pattern.pattern();
-    }
-
-    @Override
-    String parse(String str) {
-      if (str != null && pattern != null) {
-        if (!pattern.matcher(str).matches()) {
-          throw new IllegalArgumentException("Invalid value: \"" + str
-              + "\" does not belong to the domain " + getDomain());
-        }
-      }
-      return str;
-    }
-  }
-}
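
StringParam.Domain optionally carries a regex: with a non-null pattern, parse rejects any value that does not match, while a null pattern (as used by PasswordParam, PrincipalParam and TypeParam) accepts every string. A hedged sketch of a pattern-restricted parameter, assuming it lives in the same package as StringParam; the class itself is hypothetical:

    // Hypothetical parameter limited to lower-case, realm-like names.
    class RealmParam extends StringParam {
        static final String NAME = "realm";
        private static final Domain DOMAIN =
            new Domain(NAME, java.util.regex.Pattern.compile("[a-z][a-z0-9.\\-]*"));

        RealmParam(String str) {
            super(DOMAIN, str);
        }

        @Override
        public String getName() {
            return NAME;
        }
    }
    // new RealmParam("example.com") parses; new RealmParam("EXAMPLE") throws
    // IllegalArgumentException because the value is outside the declared domain.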

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/TypeParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/TypeParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/TypeParam.java
deleted file mode 100644
index da208a1..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/TypeParam.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-public class TypeParam extends StringParam {
-
-    /**
-     * Parameter name.
-     */
-    public static final String NAME = "type";
-    /**
-     * Default parameter value.
-     */
-    public static final String DEFAULT = "";
-
-    private static final Domain DOMAIN = new Domain(NAME, null);
-
-    /**
-     * Constructor.
-     *
-     * @param str a string representation of the parameter value.
-     */
-    public TypeParam(final String str) {
-        super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-}
-
-


[12/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosAuthenticator.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosAuthenticator.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosAuthenticator.java
deleted file mode 100644
index c785430..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosAuthenticator.java
+++ /dev/null
@@ -1,359 +0,0 @@
-/**
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License. See accompanying LICENSE file.
- */
-package org.apache.hadoop.has.common.spnego;
-
-import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.has.common.util.ConnectionConfigurator;
-import org.ietf.jgss.GSSContext;
-import org.ietf.jgss.GSSManager;
-import org.ietf.jgss.GSSName;
-import org.ietf.jgss.Oid;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.security.auth.Subject;
-import javax.security.auth.kerberos.KerberosKey;
-import javax.security.auth.kerberos.KerberosTicket;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
-import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.security.AccessControlContext;
-import java.security.AccessController;
-import java.security.PrivilegedActionException;
-import java.security.PrivilegedExceptionAction;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.apache.hadoop.has.common.util.PlatformName.IBM_JAVA;
-
-/**
- * Borrowed from Apache Hadoop.
- */
-
-/**
- * The {@link KerberosAuthenticator} implements the Kerberos SPNEGO authentication sequence.
- * <p>
- * It uses the default principal for the Kerberos cache (normally set via kinit).
- * <p>
- */
-public class KerberosAuthenticator implements Authenticator {
-  
-  private static final Logger LOG = LoggerFactory.getLogger(KerberosAuthenticator.class);
-
-  /**
-   * HTTP header used by the SPNEGO server endpoint during an authentication sequence.
-   */
-  public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
-
-  /**
-   * HTTP header used by the SPNEGO client endpoint during an authentication sequence.
-   */
-  public static final String AUTHORIZATION = "Authorization";
-
-  /**
-   * HTTP header prefix used by the SPNEGO client/server endpoints during an authentication sequence.
-   */
-  public static final String NEGOTIATE = "Negotiate";
-
-  private static final String AUTH_HTTP_METHOD = "OPTIONS";
-
-  private static String keytabPrincipal = null;
-  private static String keytabFile = null;
-
-  /*
-  * Defines the Kerberos configuration that will be used to obtain the Kerberos principal from the
-  * Kerberos cache.
-  */
-  private static class KerberosConfiguration extends Configuration {
-
-    private static final String OS_LOGIN_MODULE_NAME;
-    private static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");
-    private static final boolean IS_64_BIT = System.getProperty("os.arch").contains("64");
-    private static final boolean AIX = System.getProperty("os.name").equals("AIX");
-
-    /* Return the OS login module class name */
-    private static String getOSLoginModuleName() {
-      if (IBM_JAVA) {
-        if (WINDOWS) {
-          return IS_64_BIT ? "com.ibm.security.auth.module.Win64LoginModule"
-              : "com.ibm.security.auth.module.NTLoginModule";
-        } else if (AIX) {
-          return IS_64_BIT ? "com.ibm.security.auth.module.AIX64LoginModule"
-              : "com.ibm.security.auth.module.AIXLoginModule";
-        } else {
-          return "com.ibm.security.auth.module.LinuxLoginModule";
-        }
-      } else {
-        return WINDOWS ? "com.sun.security.auth.module.NTLoginModule"
-            : "com.sun.security.auth.module.UnixLoginModule";
-      }
-    }
-
-    static {
-      OS_LOGIN_MODULE_NAME = getOSLoginModuleName();
-    }
-
-    private static final AppConfigurationEntry OS_SPECIFIC_LOGIN =
-      new AppConfigurationEntry(OS_LOGIN_MODULE_NAME,
-                                AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-                                new HashMap<String, String>());
-
-    private static final Map<String, String> KEYTAB_KERBEROS_OPTIONS
-        = new HashMap<String, String>();
-    static {
-      if (IBM_JAVA) {
-        KEYTAB_KERBEROS_OPTIONS.put("credsType", "both");
-        KEYTAB_KERBEROS_OPTIONS.put("useKeytab",
-            prependFileAuthority(keytabFile));
-      } else {
-        KEYTAB_KERBEROS_OPTIONS.put("doNotPrompt", "true");
-        KEYTAB_KERBEROS_OPTIONS.put("useKeyTab", "true");
-        KEYTAB_KERBEROS_OPTIONS.put("storeKey", "true");
-        KEYTAB_KERBEROS_OPTIONS.put("keyTab", keytabFile);
-      }
-      KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
-      KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
-      KEYTAB_KERBEROS_OPTIONS.put("debug", "false");
-    }
-
-    private static final AppConfigurationEntry USER_KERBEROS_LOGIN =
-      new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
-                                AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL,
-                                KEYTAB_KERBEROS_OPTIONS);
-
-    private static final AppConfigurationEntry[] USER_KERBEROS_CONF =
-      new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, USER_KERBEROS_LOGIN};
-
-    @Override
-    public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
-      return USER_KERBEROS_CONF;
-    }
-
-    private static String prependFileAuthority(String keytabPath) {
-      return keytabPath.startsWith("file://") ? keytabPath
-          : "file://" + keytabPath;
-    }
-  }
-  
-  private URL url;
-  private HttpURLConnection conn;
-  private Base64 base64;
-  private ConnectionConfigurator connConfigurator;
-
-  /**
-   * Sets a {@link ConnectionConfigurator} instance to use for
-   * configuring connections.
-   *
-   * @param configurator the {@link ConnectionConfigurator} instance.
-   */
-  @Override
-  public void setConnectionConfigurator(ConnectionConfigurator configurator) {
-    connConfigurator = configurator;
-  }
-
-  /**
-   * Performs SPNEGO authentication against the specified URL.
-   * <p>
-   * If a token is given it does a NOP and returns the given token.
-   * <p>
-   * If no token is given, it will perform the SPNEGO authentication sequence using an
-   * HTTP <code>OPTIONS</code> request.
-   *
-   * @param url the URL to authenticate against.
-   * @param token the authentication token being used for the user.
-   *
-   * @throws IOException if an IO error occurred.
-   * @throws AuthenticationException if an authentication error occurred.
-   */
-  @Override
-  public void authenticate(URL url, AuthenticatedURL.Token token)
-    throws IOException, AuthenticationException {
-
-    if (!token.isSet()) {
-      this.url = url;
-      base64 = new Base64(0);
-      conn = (HttpURLConnection) url.openConnection();
-      if (connConfigurator != null) {
-        conn = connConfigurator.configure(conn);
-      }
-      conn.setRequestMethod(AUTH_HTTP_METHOD);
-      conn.connect();
-      
-      boolean needFallback = false;
-      if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
-        LOG.debug("JDK performed authentication on our behalf.");
-        // If the JDK already did the SPNEGO back-and-forth for
-        // us, just pull out the token.
-        AuthenticatedURL.extractToken(conn, token);
-        if (isTokenKerberos(token)) {
-          return;
-        }
-        needFallback = true;
-      }
-      if (!needFallback && isNegotiate()) {
-        LOG.debug("Performing our own SPNEGO sequence.");
-        doSpnegoSequence(token);
-      } else {
-        throw new IOException("Should perform our own SPNEGO sequence");
-      }
-    }
-  }
-
-  public void setKeyTab(String keytabFile, String keytabPrincipal) {
-    this.keytabFile = keytabFile;
-    this.keytabPrincipal = keytabPrincipal;
-  }
-
-  /*
-   * Check if the passed token is of type "kerberos" or "kerberos-dt"
-   */
-  private boolean isTokenKerberos(AuthenticatedURL.Token token)
-      throws AuthenticationException {
-    if (token.isSet()) {
-      AuthToken aToken = AuthToken.parse(token.toString());
-      if (aToken.getType().equals("kerberos")
-          || aToken.getType().equals("kerberos-dt")) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  /*
-  * Indicates if the response is starting a SPNEGO negotiation.
-  */
-  private boolean isNegotiate() throws IOException {
-    boolean negotiate = false;
-    if (conn.getResponseCode() == HttpURLConnection.HTTP_UNAUTHORIZED) {
-      String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
-      negotiate = authHeader != null && authHeader.trim().startsWith(NEGOTIATE);
-    }
-    return negotiate;
-  }
-
-  /**
-   * Implements the SPNEGO authentication sequence interaction using the current default principal
-   * in the Kerberos cache (normally set via kinit).
-   *
-   * @param token the authentication token being used for the user.
-   *
-   * @throws IOException if an IO error occurred.
-   * @throws AuthenticationException if an authentication error occurred.
-   */
-  private void doSpnegoSequence(AuthenticatedURL.Token token) throws IOException, AuthenticationException {
-    try {
-      AccessControlContext context = AccessController.getContext();
-      Subject subject = Subject.getSubject(context);
-      if (subject == null
-          || (subject.getPrivateCredentials(KerberosKey.class).isEmpty()
-              && subject.getPrivateCredentials(KerberosTicket.class).isEmpty())) {
-        LOG.debug("No subject in context, logging in");
-        subject = new Subject();
-        LoginContext login = new LoginContext("", subject,
-            null, new KerberosConfiguration());
-        login.login();
-      }
-
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("Using subject: " + subject);
-      }
-      Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
-
-        @Override
-        public Void run() throws Exception {
-          GSSContext gssContext = null;
-          try {
-            GSSManager gssManager = GSSManager.getInstance();
-            String servicePrincipal = KerberosUtil.getServicePrincipal("HTTP",
-                KerberosAuthenticator.this.url.getHost());
-            LOG.info("service principal is:" + servicePrincipal);
-            Oid oid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
-            GSSName serviceName = gssManager.createName(servicePrincipal,
-                                                        oid);
-            oid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
-            gssContext = gssManager.createContext(serviceName, oid, null,
-                                                  GSSContext.DEFAULT_LIFETIME);
-            gssContext.requestCredDeleg(true);
-            gssContext.requestMutualAuth(true);
-
-            byte[] inToken = new byte[0];
-            byte[] outToken;
-            boolean established = false;
-
-            // Loop while the context is still not established
-            while (!established) {
-              outToken = gssContext.initSecContext(inToken, 0, inToken.length);
-              if (outToken != null) {
-                sendToken(outToken);
-              }
-
-              if (!gssContext.isEstablished()) {
-                inToken = readToken();
-              } else {
-                established = true;
-              }
-            }
-          } finally {
-            if (gssContext != null) {
-              gssContext.dispose();
-              gssContext = null;
-            }
-          }
-          return null;
-        }
-      });
-    } catch (PrivilegedActionException ex) {
-      throw new AuthenticationException(ex.getException());
-    } catch (LoginException ex) {
-      throw new AuthenticationException(ex);
-    }
-    AuthenticatedURL.extractToken(conn, token);
-  }
-
-  /*
-  * Sends the Kerberos token to the server.
-  */
-  private void sendToken(byte[] outToken) throws IOException {
-    String token = base64.encodeToString(outToken);
-    conn = (HttpURLConnection) url.openConnection();
-    if (connConfigurator != null) {
-      conn = connConfigurator.configure(conn);
-    }
-    conn.setRequestMethod(AUTH_HTTP_METHOD);
-    conn.setRequestProperty(AUTHORIZATION, NEGOTIATE + " " + token);
-    conn.connect();
-  }
-
-  /*
-  * Retrieves the Kerberos token returned by the server.
-  */
-  private byte[] readToken() throws IOException, AuthenticationException {
-    int status = conn.getResponseCode();
-    if (status == HttpURLConnection.HTTP_OK || status == HttpURLConnection.HTTP_UNAUTHORIZED) {
-      String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
-      if (authHeader == null || !authHeader.trim().startsWith(NEGOTIATE)) {
-        throw new AuthenticationException("Invalid SPNEGO sequence, '" + WWW_AUTHENTICATE
-            + "' header incorrect: " + authHeader);
-      }
-      String negotiation = authHeader.trim().substring((NEGOTIATE + " ").length()).trim();
-      return base64.decode(negotiation);
-    }
-    throw new AuthenticationException("Invalid SPNEGO sequence, status code: " + status);
-  }
-
-}
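
When the JDK has not already completed SPNEGO on its own, KerberosAuthenticator probes the endpoint with an OPTIONS request, checks for a WWW-Authenticate: Negotiate challenge, and then loops initSecContext/sendToken/readToken until the GSS context is established. A rough client-side sketch; AuthenticatedURL and its Token come from a companion class not shown in this excerpt, and the host, keytab and principal are placeholders:

    KerberosAuthenticator authenticator = new KerberosAuthenticator();
    authenticator.setKeyTab("/etc/security/keytabs/client.keytab", "client@EXAMPLE.COM");
    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
    URL url = new URL("https://has.example.com:8080/has/v1");
    authenticator.authenticate(url, token);   // may throw IOException or AuthenticationException
    // token.isSet() is true on success and the token can be reused for later requests.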

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosHasAuthenticator.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosHasAuthenticator.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosHasAuthenticator.java
deleted file mode 100644
index 7db0551..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosHasAuthenticator.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.common.spnego;
-
-public class KerberosHasAuthenticator extends KerberosAuthenticator {
-
-    public KerberosHasAuthenticator(String keytabFile, String keytabPrincipal) {
-        setKeyTab(keytabFile, keytabPrincipal);
-    }
-}
\ No newline at end of file
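
KerberosHasAuthenticator only pre-wires the keytab used for the SPNEGO login, so callers do not need to invoke setKeyTab themselves. A one-line sketch; the keytab path and principal are placeholders:

    Authenticator spnegoAuth = new KerberosHasAuthenticator(
        "/etc/security/keytabs/admin.keytab", "kadmin/EXAMPLE.COM@EXAMPLE.COM");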

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosUtil.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosUtil.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosUtil.java
deleted file mode 100644
index b6e330d..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosUtil.java
+++ /dev/null
@@ -1,262 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.common.spnego;
-
-import java.io.File;
-import java.io.IOException;
-import java.lang.reflect.Field;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.Set;
-import java.util.regex.Pattern;
-
-import org.apache.kerby.kerberos.kerb.keytab.Keytab;
-import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
-import org.ietf.jgss.GSSException;
-import org.ietf.jgss.Oid;
-
-import javax.security.auth.Subject;
-import javax.security.auth.kerberos.KerberosTicket;
-import javax.security.auth.kerberos.KeyTab;
-
-import static org.apache.hadoop.has.common.util.PlatformName.IBM_JAVA;
-
-/**
- * Borrowed from Apache Hadoop.
- */
-
-public class KerberosUtil {
-
-  /* Return the Kerberos login module name */
-  public static String getKrb5LoginModuleName() {
-    return (IBM_JAVA)
-      ? "com.ibm.security.auth.module.Krb5LoginModule"
-      : "com.sun.security.auth.module.Krb5LoginModule";
-  }
-
-  public static Oid getOidInstance(String oidName)
-      throws ClassNotFoundException, GSSException, NoSuchFieldException,
-      IllegalAccessException {
-    Class<?> oidClass;
-    if (IBM_JAVA) {
-      if ("NT_GSS_KRB5_PRINCIPAL".equals(oidName)) {
-        // IBM JDK GSSUtil class does not have field for krb5 principal oid
-        return new Oid("1.2.840.113554.1.2.2.1");
-      }
-      oidClass = Class.forName("com.ibm.security.jgss.GSSUtil");
-    } else {
-      oidClass = Class.forName("sun.security.jgss.GSSUtil");
-    }
-    Field oidField = oidClass.getDeclaredField(oidName);
-    return (Oid) oidField.get(oidClass);
-  }
-
-  public static String getDefaultRealm() 
-      throws ClassNotFoundException, NoSuchMethodException, 
-      IllegalArgumentException, IllegalAccessException, 
-      InvocationTargetException {
-    Object kerbConf;
-    Class<?> classRef;
-    Method getInstanceMethod;
-    Method getDefaultRealmMethod;
-    if (IBM_JAVA) {
-      classRef = Class.forName("com.ibm.security.krb5.internal.Config");
-    } else {
-      classRef = Class.forName("sun.security.krb5.Config");
-    }
-    getInstanceMethod = classRef.getMethod("getInstance", new Class[0]);
-    kerbConf = getInstanceMethod.invoke(classRef, new Object[0]);
-    getDefaultRealmMethod = classRef.getDeclaredMethod("getDefaultRealm",
-        new Class[0]);
-    return (String) getDefaultRealmMethod.invoke(kerbConf, new Object[0]);
-  }
-
-  public static String getDefaultRealmProtected() {
-    String realmString = null;
-    try {
-      realmString = getDefaultRealm();
-    } catch (RuntimeException rte) {
-      //silently catch everything
-    } catch (Exception e) {
-      //silently return null
-    }
-    return realmString;
-  }
-
-  /*
-   * For a Service Host Principal specification, map the host's domain
-   * to kerberos realm, as specified by krb5.conf [domain_realm] mappings.
-   * Unfortunately the mapping routines are private to the security.krb5
-   * package, so have to construct a PrincipalName instance to derive the realm.
-   *
-   * Many things can go wrong with Kerberos configuration, and this is not
-   * the place to be throwing exceptions to help debug them.  Nor do we choose
-   * to make potentially voluminous logs on every call to a communications API.
-   * So we simply swallow all exceptions from the underlying libraries and
-   * return null if we can't get a good value for the realmString.
-   *
-   * @param shortprinc A service principal name with host fqdn as instance, e.g.
-   *     "HTTP/myhost.mydomain"
-   * @return String value of Kerberos realm, mapped from host fqdn
-   *     May be default realm, or may be null.
-   */
-  public static String getDomainRealm(String shortprinc) {
-    Class<?> classRef;
-    Object principalName; //of type sun.security.krb5.PrincipalName or IBM equiv
-    String realmString = null;
-    try {
-      if (IBM_JAVA) {
-        classRef = Class.forName("com.ibm.security.krb5.PrincipalName");
-      } else {
-        classRef = Class.forName("sun.security.krb5.PrincipalName");
-      }
-      int tKrbNtSrvHst = classRef.getField("KRB_NT_SRV_HST").getInt(null);
-      principalName = classRef.getConstructor(String.class, int.class).
-          newInstance(shortprinc, tKrbNtSrvHst);
-      realmString = (String) classRef.getMethod("getRealmString", new Class[0]).
-          invoke(principalName, new Object[0]);
-    } catch (RuntimeException rte) {
-      //silently catch everything
-    } catch (Exception e) {
-      //silently return default realm (which may itself be null)
-    }
-    if (null == realmString || realmString.equals("")) {
-      return getDefaultRealmProtected();
-    } else {
-      return realmString;
-    }
-  }
-
-  /* Return fqdn of the current host */
-  static String getLocalHostName() throws UnknownHostException {
-    return InetAddress.getLocalHost().getCanonicalHostName();
-  }
-  
-  /**
-   * Create Kerberos principal for a given service and hostname,
-   * inferring realm from the fqdn of the hostname. It converts
-   * hostname to lower case. If hostname is null or "0.0.0.0", it uses
-   * dynamically looked-up fqdn of the current host instead.
-   * If domain_realm mappings are inadequately specified, it will
-   * use default_realm, per usual Kerberos behavior.
-   * If default_realm also gives a null value, then a principal
-   * without realm will be returned, which by Kerberos definitions is
-   * just another way to specify default realm.
-   *
-   * @param service
-   *          Service for which you want to generate the principal.
-   * @param hostname
-   *          Fully-qualified domain name.
-   * @return Converted Kerberos principal name.
-   * @throws UnknownHostException
-   *           If no IP address for the local host could be found.
-   */
-  public static final String getServicePrincipal(String service,
-      String hostname)
-      throws UnknownHostException {
-    String fqdn = hostname;
-    String shortprinc = null;
-    String realmString = null;
-    if (null == fqdn || fqdn.equals("") || fqdn.equals("0.0.0.0")) {
-      fqdn = getLocalHostName();
-    }
-    // convert hostname to lowercase as kerberos does not work with hostnames
-    // with uppercase characters.
-    fqdn = fqdn.toLowerCase(Locale.US);
-    shortprinc = service + "/" + fqdn;
-    // Obtain the realm name inferred from the domain of the host
-    realmString = getDomainRealm(shortprinc);
-    if (null == realmString || realmString.equals("")) {
-      return shortprinc;
-    } else {
-      return shortprinc + "@" + realmString;
-    }
-  }
-
-  /**
-   * Get all the unique principals present in the keytabfile.
-   * 
-   * @param keytabFileName 
-   *          Name of the keytab file to be read.
-   * @return list of unique principals in the keytab.
-   * @throws IOException 
-   *          If keytab entries cannot be read from the file.
-   */
-  static final String[] getPrincipalNames(String keytabFileName) throws IOException {
-    Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
-    Set<String> principals = new HashSet<String>();
-    List<PrincipalName> entries = keytab.getPrincipals();
-    for (PrincipalName entry : entries) {
-      principals.add(entry.getName().replace("\\", "/"));
-    }
-    return principals.toArray(new String[0]);
-  }
-
-  /**
-   * Get all the unique principals from keytabfile which matches a pattern.
-   * 
-   * @param keytab Name of the keytab file to be read.
-   * @param pattern pattern to be matched.
-   * @return list of unique principals which matches the pattern.
-   * @throws IOException if cannot get the principal name
-   */
-  public static final String[] getPrincipalNames(String keytab,
-      Pattern pattern) throws IOException {
-    String[] principals = getPrincipalNames(keytab);
-    if (principals.length != 0) {
-      List<String> matchingPrincipals = new ArrayList<String>();
-      for (String principal : principals) {
-        if (pattern.matcher(principal).matches()) {
-          matchingPrincipals.add(principal);
-        }
-      }
-      principals = matchingPrincipals.toArray(new String[0]);
-    }
-    return principals;
-  }
-
-  /**
-   * Check if the subject contains Kerberos keytab related objects.
-   * The Kerberos keytab object attached in subject has been changed
-   * from KerberosKey (JDK 7) to KeyTab (JDK 8)
-   *
-   *
-   * @param subject subject to be checked
-   * @return true if the subject contains Kerberos keytab
-   */
-  public static boolean hasKerberosKeyTab(Subject subject) {
-    return !subject.getPrivateCredentials(KeyTab.class).isEmpty();
-  }
-
-  /**
-   * Check if the subject contains Kerberos ticket.
-   *
-   *
-   * @param subject subject to be checked
-   * @return true if the subject contains Kerberos ticket
-   */
-  public static boolean hasKerberosTicket(Subject subject) {
-    return !subject.getPrivateCredentials(KerberosTicket.class).isEmpty();
-  }
-}
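
KerberosUtil mostly resolves service principals (mapping the host's domain to a realm via the krb5.conf domain_realm mappings, falling back to the default realm) and enumerates principals stored in a keytab. A short sketch of the two public entry points; paths and hostnames are placeholders:

    // "HTTP/host.example.com@EXAMPLE.COM" when a realm can be resolved,
    // otherwise just "HTTP/host.example.com".
    String spnego = KerberosUtil.getServicePrincipal("HTTP", "host.example.com");

    // Unique principals in a keytab whose names match a pattern.
    String[] admins = KerberosUtil.getPrincipalNames(
        "/etc/security/keytabs/has.keytab",
        java.util.regex.Pattern.compile(".*admin.*"));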

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/KeyStoresFactory.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/KeyStoresFactory.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/KeyStoresFactory.java
deleted file mode 100644
index 83ad9a1..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/KeyStoresFactory.java
+++ /dev/null
@@ -1,254 +0,0 @@
-/**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-package org.apache.hadoop.has.common.ssl;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.util.StringUtils;
-import org.apache.kerby.kerberos.kerb.client.KrbConfig;
-
-import javax.net.ssl.KeyManager;
-import javax.net.ssl.KeyManagerFactory;
-import javax.net.ssl.TrustManager;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.security.GeneralSecurityException;
-import java.security.KeyStore;
-import java.text.MessageFormat;
-
-/**
- * Borrowed from Apache Hadoop.
- */
-
-/**
- * Interface that gives access to {@link KeyManager} and {@link TrustManager}
- * implementations.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public class KeyStoresFactory extends KrbConfig {
-
-  private static final Log LOG =
-    LogFactory.getLog(KeyStoresFactory.class);
-
-  public static final String SSL_KEYSTORE_LOCATION_TPL_KEY =
-    "ssl.{0}.keystore.location";
-  public static final String SSL_KEYSTORE_PASSWORD_TPL_KEY =
-    "ssl.{0}.keystore.password";
-  public static final String SSL_KEYSTORE_KEYPASSWORD_TPL_KEY =
-    "ssl.{0}.keystore.keypassword";
-  public static final String SSL_KEYSTORE_TYPE_TPL_KEY =
-    "ssl.{0}.keystore.type";
-
-  public static final String SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY =
-    "ssl.{0}.truststore.reload.interval";
-  public static final String SSL_TRUSTSTORE_LOCATION_TPL_KEY =
-    "ssl.{0}.truststore.location";
-  public static final String SSL_TRUSTSTORE_PASSWORD_TPL_KEY =
-    "ssl.{0}.truststore.password";
-  public static final String SSL_TRUSTSTORE_TYPE_TPL_KEY =
-    "ssl.{0}.truststore.type";
-
-  /**
-   * Default format of the keystore files.
-   */
-  public static final String DEFAULT_KEYSTORE_TYPE = "jks";
-
-  /**
-   * Reload interval in milliseconds.
-   */
-  public static final long DEFAULT_SSL_TRUSTSTORE_RELOAD_INTERVAL = 10000;
-
-  private HasConfig conf;
-  private KeyManager[] keyManagers;
-  private TrustManager[] trustManagers;
-  private ReloadingX509TrustManager trustManager;
-
-  /**
-   * Sets the configuration for the factory.
-   *
-   * @param conf the configuration for the factory.
-   */
-  public void setConf(HasConfig conf) {
-    this.conf = conf;
-  }
-
-  /**
-   * Returns the configuration of the factory.
-   *
-   * @return the configuration of the factory.
-   */
-  public HasConfig getConf() {
-    return conf;
-  }
-
-
-  /**
-   * Initializes the keystores of the factory.
-   *
-   * @param mode if the keystores are to be used in client or server mode.
-   * @throws IOException thrown if the keystores could not be initialized due
-   * to an IO error.
-   * @throws GeneralSecurityException thrown if the keystores could not be
-   * initialized due to a security error.
-   */
-  public void init(SSLFactory.Mode mode) throws IOException, GeneralSecurityException {
-     boolean requireClientCert =
-      conf.getBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY,
-          SSLFactory.DEFAULT_SSL_REQUIRE_CLIENT_CERT);
-
-    // certificate store
-    String keystoreType =
-      conf.getString(resolvePropertyName(mode, SSL_KEYSTORE_TYPE_TPL_KEY),
-               DEFAULT_KEYSTORE_TYPE);
-    KeyStore keystore = KeyStore.getInstance(keystoreType);
-    String keystoreKeyPassword = null;
-    if (requireClientCert || mode == SSLFactory.Mode.SERVER) {
-      String locationProperty =
-        resolvePropertyName(mode, SSL_KEYSTORE_LOCATION_TPL_KEY);
-      String keystoreLocation = conf.getString(locationProperty, "");
-      if (keystoreLocation.isEmpty()) {
-        throw new GeneralSecurityException("The property '" + locationProperty
-            + "' has not been set in the ssl configuration file.");
-      }
-      String passwordProperty =
-        resolvePropertyName(mode, SSL_KEYSTORE_PASSWORD_TPL_KEY);
-      String keystorePassword = getPassword(conf, passwordProperty, "");
-      if (keystorePassword.isEmpty()) {
-        throw new GeneralSecurityException("The property '" + passwordProperty
-            + "' has not been set in the ssl configuration file.");
-      }
-      String keyPasswordProperty =
-        resolvePropertyName(mode, SSL_KEYSTORE_KEYPASSWORD_TPL_KEY);
-      // Key password defaults to the same value as store password for
-      // compatibility with legacy configurations that did not use a separate
-      // configuration property for key password.
-      keystoreKeyPassword = getPassword(
-          conf, keyPasswordProperty, keystorePassword);
-      LOG.debug(mode.toString() + " KeyStore: " + keystoreLocation);
-
-      InputStream is = new FileInputStream(keystoreLocation);
-      try {
-        keystore.load(is, keystorePassword.toCharArray());
-      } finally {
-        is.close();
-      }
-      LOG.debug(mode.toString() + " Loaded KeyStore: " + keystoreLocation);
-    } else {
-      keystore.load(null, null);
-    }
-    KeyManagerFactory keyMgrFactory = KeyManagerFactory
-        .getInstance(SSLFactory.SSLCERTIFICATE);
-
-    keyMgrFactory.init(keystore, (keystoreKeyPassword != null)
-        ? keystoreKeyPassword.toCharArray() : null);
-    keyManagers = keyMgrFactory.getKeyManagers();
-
-    //trust store
-    String truststoreType =
-      conf.getString(resolvePropertyName(mode, SSL_TRUSTSTORE_TYPE_TPL_KEY),
-               DEFAULT_KEYSTORE_TYPE);
-
-    String locationProperty =
-      resolvePropertyName(mode, SSL_TRUSTSTORE_LOCATION_TPL_KEY);
-    String truststoreLocation = conf.getString(locationProperty, "");
-    if (!truststoreLocation.isEmpty()) {
-      String passwordProperty = resolvePropertyName(mode,
-          SSL_TRUSTSTORE_PASSWORD_TPL_KEY);
-      String truststorePassword = getPassword(conf, passwordProperty, "");
-      if (truststorePassword.isEmpty()) {
-        throw new GeneralSecurityException("The property '" + passwordProperty
-            + "' has not been set in the ssl configuration file.");
-      }
-      long truststoreReloadInterval =
-          conf.getLong(resolvePropertyName(mode, SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY),
-              DEFAULT_SSL_TRUSTSTORE_RELOAD_INTERVAL);
-
-      LOG.debug(mode.toString() + " TrustStore: " + truststoreLocation);
-
-      trustManager = new ReloadingX509TrustManager(truststoreType,
-          truststoreLocation,
-          truststorePassword,
-          truststoreReloadInterval);
-      trustManager.init();
-      LOG.debug(mode.toString() + " Loaded TrustStore: " + truststoreLocation);
-      trustManagers = new TrustManager[]{trustManager};
-    } else {
-      LOG.debug("The property '" + locationProperty + "' has not been set, "
-          + "no TrustStore will be loaded");
-      trustManagers = null;
-    }
-  }
-
-  String getPassword(HasConfig conf, String alias, String defaultPass) {
-    String password = conf.getString(alias);
-    return password != null ? password : defaultPass;
-  }
-
-  /**
-   * Releases any resources being used.
-   */
-  public void destroy() {
-    if (trustManager != null) {
-      trustManager.destroy();
-      trustManager = null;
-      keyManagers = null;
-      trustManagers = null;
-    }
-  }
-
-  /**
-   * Returns the keymanagers for owned certificates.
-   *
-   * @return the keymanagers for owned certificates.
-   */
-  public KeyManager[] getKeyManagers() {
-    return keyManagers;
-  }
-
-  /**
-   * Returns the trustmanagers for trusted certificates.
-   *
-   * @return the trustmanagers for trusted certificates.
-   */
-  public TrustManager[] getTrustManagers() {
-    return trustManagers;
-  }
-
-    /**
-   * Resolves a property name to its client/server version if applicable.
-   * <p/>
-   * NOTE: This method is public for testing purposes.
-   *
-   * @param mode client/server mode.
-   * @param template property name template.
-   * @return the resolved property name.
-   */
-  @VisibleForTesting
-  public static String resolvePropertyName(SSLFactory.Mode mode,
-                                           String template) {
-    return MessageFormat.format(
-        template, StringUtils.toLowerCase(mode.toString()));
-  }
-}
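
KeyStoresFactory derives every keystore/truststore setting from a mode-specific property name: each template contains a {0} placeholder that resolvePropertyName fills with the lower-cased mode. For example:

    // "ssl.client.keystore.location"
    String clientKeystore = KeyStoresFactory.resolvePropertyName(
        SSLFactory.Mode.CLIENT, KeyStoresFactory.SSL_KEYSTORE_LOCATION_TPL_KEY);
    // "ssl.server.truststore.password"
    String serverTrustPass = KeyStoresFactory.resolvePropertyName(
        SSLFactory.Mode.SERVER, KeyStoresFactory.SSL_TRUSTSTORE_PASSWORD_TPL_KEY);

The ssl configuration file read by SSLFactory is then expected to define the resolved names, such as ssl.server.keystore.location and ssl.server.keystore.password.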

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/ReloadingX509TrustManager.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/ReloadingX509TrustManager.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/ReloadingX509TrustManager.java
deleted file mode 100644
index 2aa2e6c..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/ReloadingX509TrustManager.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.has.common.ssl;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-import javax.net.ssl.TrustManager;
-import javax.net.ssl.TrustManagerFactory;
-import javax.net.ssl.X509TrustManager;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.security.GeneralSecurityException;
-import java.security.KeyStore;
-import java.security.cert.CertificateException;
-import java.security.cert.X509Certificate;
-import java.util.concurrent.atomic.AtomicReference;
-
-/**
- * Borrowed from Apache Hadoop.
- */
-
-/**
- * A {@link TrustManager} implementation that reloads its configuration when
- * the truststore file on disk changes.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public final class ReloadingX509TrustManager
-  implements X509TrustManager, Runnable {
-
-  private static final Log LOG =
-    LogFactory.getLog(ReloadingX509TrustManager.class);
-
-  private String type;
-  private File file;
-  private String password;
-  private long lastLoaded;
-  private long reloadInterval;
-  private AtomicReference<X509TrustManager> trustManagerRef;
-
-  private volatile boolean running;
-  private Thread reloader;
-
-  /**
-   * Creates a reloadable trustmanager. The trustmanager reloads itself
-   * if the underlying truststore file has changed.
-   *
-   * @param type type of truststore file, typically 'jks'.
-   * @param location local path to the truststore file.
-   * @param password password of the truststore file.
-   * @param reloadInterval interval to check if the truststore file has
-   * changed, in milliseconds.
-   * @throws IOException thrown if the truststore could not be initialized due
-   * to an IO error.
-   * @throws GeneralSecurityException thrown if the truststore could not be
-   * initialized due to a security error.
-   */
-  public ReloadingX509TrustManager(String type, String location,
-                                   String password, long reloadInterval)
-    throws IOException, GeneralSecurityException {
-    this.type = type;
-    file = new File(location);
-    this.password = password;
-    trustManagerRef = new AtomicReference<X509TrustManager>();
-    trustManagerRef.set(loadTrustManager());
-    this.reloadInterval = reloadInterval;
-  }
-
-  /**
-   * Starts the reloader thread.
-   */
-  public void init() {
-    reloader = new Thread(this, "Truststore reloader thread");
-    reloader.setDaemon(true);
-    running =  true;
-    reloader.start();
-  }
-
-  /**
-   * Stops the reloader thread.
-   */
-  public void destroy() {
-    running = false;
-    reloader.interrupt();
-  }
-
-  /**
-   * Returns the reload check interval.
-   *
-   * @return the reload check interval, in milliseconds.
-   */
-  public long getReloadInterval() {
-    return reloadInterval;
-  }
-
-  @Override
-  public void checkClientTrusted(X509Certificate[] chain, String authType)
-    throws CertificateException {
-    X509TrustManager tm = trustManagerRef.get();
-    if (tm != null) {
-      tm.checkClientTrusted(chain, authType);
-    } else {
-      throw new CertificateException("Unknown client chain certificate: "
-          + chain[0].toString());
-    }
-  }
-
-  @Override
-  public void checkServerTrusted(X509Certificate[] chain, String authType)
-    throws CertificateException {
-    X509TrustManager tm = trustManagerRef.get();
-    if (tm != null) {
-      tm.checkServerTrusted(chain, authType);
-    } else {
-      throw new CertificateException("Unknown server chain certificate: "
-          + chain[0].toString());
-    }
-  }
-
-  private static final X509Certificate[] EMPTY = new X509Certificate[0];
-  @Override
-  public X509Certificate[] getAcceptedIssuers() {
-    X509Certificate[] issuers = EMPTY;
-    X509TrustManager tm = trustManagerRef.get();
-    if (tm != null) {
-      issuers = tm.getAcceptedIssuers();
-    }
-    return issuers;
-  }
-
-  boolean needsReload() {
-    boolean reload = true;
-    if (file.exists()) {
-      if (file.lastModified() == lastLoaded) {
-        reload = false;
-      }
-    } else {
-      lastLoaded = 0;
-    }
-    return reload;
-  }
-
-  X509TrustManager loadTrustManager()
-  throws IOException, GeneralSecurityException {
-    X509TrustManager trustManager = null;
-    KeyStore ks = KeyStore.getInstance(type);
-    lastLoaded = file.lastModified();
-    FileInputStream in = new FileInputStream(file);
-    try {
-      ks.load(in, password.toCharArray());
-      LOG.debug("Loaded truststore '" + file + "'");
-    } finally {
-      in.close();
-    }
-
-    TrustManagerFactory trustManagerFactory = 
-      TrustManagerFactory.getInstance(SSLFactory.SSLCERTIFICATE);
-    trustManagerFactory.init(ks);
-    TrustManager[] trustManagers = trustManagerFactory.getTrustManagers();
-    for (TrustManager trustManager1 : trustManagers) {
-      if (trustManager1 instanceof X509TrustManager) {
-        trustManager = (X509TrustManager) trustManager1;
-        break;
-      }
-    }
-    return trustManager;
-  }
-
-  @Override
-  public void run() {
-    while (running) {
-      try {
-        Thread.sleep(reloadInterval);
-      } catch (InterruptedException e) {
-        //NOP
-      }
-      if (running && needsReload()) {
-        try {
-          trustManagerRef.set(loadTrustManager());
-        } catch (Exception ex) {
-          LOG.warn("Could not load truststore (keep using existing one) : "
-              + ex.toString(), ex);
-        }
-      }
-    }
-  }
-
-}
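
ReloadingX509TrustManager polls the truststore file from a daemon thread and swaps in a freshly loaded X509TrustManager whenever the file's modification time changes; if a reload fails, the previous manager stays in place. A construction sketch with a placeholder path and password and a 10-second poll interval:

    ReloadingX509TrustManager tm = new ReloadingX509TrustManager(
        "jks", "/etc/has/truststore.jks", "changeit", 10000);
    tm.init();                                               // starts the reloader thread
    TrustManager[] trustManagers = new TrustManager[] {tm};  // e.g. passed to SSLContext.init
    // ...
    tm.destroy();                                            // stops the reloader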

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLFactory.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLFactory.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLFactory.java
deleted file mode 100644
index dcb5140..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLFactory.java
+++ /dev/null
@@ -1,290 +0,0 @@
-/**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-package org.apache.hadoop.has.common.ssl;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.ConnectionConfigurator;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.util.StringUtils;
-
-import javax.net.ssl.HostnameVerifier;
-import javax.net.ssl.HttpsURLConnection;
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.SSLEngine;
-import javax.net.ssl.SSLServerSocketFactory;
-import javax.net.ssl.SSLSocketFactory;
-import java.io.File;
-import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.security.GeneralSecurityException;
-
-import static org.apache.hadoop.has.common.util.PlatformName.IBM_JAVA;
-
-/**
- * Borrowed from Apache Hadoop.
- */
-
-/**
- * Factory that creates SSLEngine and SSLSocketFactory instances using
- * Hadoop configuration information.
- * <p/>
- * It uses a ReloadingX509TrustManager, which reloads public keys if the truststore file changes.
- * <p/>
- * This factory is used to configure HTTPS in Hadoop HTTP based endpoints, both
- * client and server.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public class SSLFactory implements ConnectionConfigurator {
-
-  @InterfaceAudience.Private
-  public enum Mode {
-    CLIENT, SERVER
-  }
-
-  public static final String SSL_REQUIRE_CLIENT_CERT_KEY =
-    "hadoop.ssl.require.client.CERT";
-  public static final String SSL_HOSTNAME_VERIFIER_KEY =
-    "hadoop.ssl.hostname.verifier";
-  public static final String SSL_CLIENT_CONF_KEY =
-    "hadoop.ssl.client.conf";
-  public static final String SSL_SERVER_CONF_KEY =
-      "hadoop.ssl.server.conf";
-  public static final String SSLCERTIFICATE = IBM_JAVA ? "ibmX509" : "SunX509";
-
-  public static final boolean DEFAULT_SSL_REQUIRE_CLIENT_CERT = false;
-
-  public static final String KEYSTORES_FACTORY_CLASS_KEY =
-    "hadoop.ssl.keystores.factory.class";
-
-  public static final String SSL_ENABLED_PROTOCOLS =
-      "hadoop.ssl.enabled.protocols";
-  public static final String DEFAULT_SSL_ENABLED_PROTOCOLS = "TLSv1";
-
-  private HasConfig conf;
-  private Mode mode;
-  private boolean requireClientCert;
-  private SSLContext context;
-  private HostnameVerifier hostnameVerifier;
-  private KeyStoresFactory keystoresFactory;
-
-  private String[] enabledProtocols = null;
-
-  /**
-   * Creates an SSLFactory.
-   *
-   * @param mode SSLFactory mode, client or server.
-   * @param conf Hadoop configuration from where the SSLFactory configuration
-   * will be read.
-   */
-  public SSLFactory(Mode mode, HasConfig conf) throws HasException {
-    this.conf = conf;
-    if (mode == null) {
-      throw new IllegalArgumentException("mode cannot be NULL");
-    }
-    this.mode = mode;
-    requireClientCert = conf.getBoolean(SSL_REQUIRE_CLIENT_CERT_KEY,
-                                        DEFAULT_SSL_REQUIRE_CLIENT_CERT);
-    HasConfig sslConf = readSSLConfiguration(mode);
-
-    keystoresFactory = new KeyStoresFactory();
-    keystoresFactory.setConf(sslConf);
-
-    enabledProtocols = new String[] {DEFAULT_SSL_ENABLED_PROTOCOLS};
-  }
-
-  private HasConfig readSSLConfiguration(Mode mode) throws HasException {
-    HasConfig sslConf = new HasConfig();
-    sslConf.setBoolean(SSL_REQUIRE_CLIENT_CERT_KEY, requireClientCert);
-    String sslConfResource;
-    if (mode == Mode.CLIENT) {
-      sslConfResource = conf.getString(SSLFactory.SSL_CLIENT_CONF_KEY);
-    } else {
-      sslConfResource = conf.getString(SSLFactory.SSL_SERVER_CONF_KEY);
-    }
-    try {
-      sslConf.addIniConfig(new File(sslConfResource));
-    } catch (IOException e) {
-      throw new HasException(e);
-    }
-    return sslConf;
-  }
-
-  /**
-   * Initializes the factory.
-   *
-   * @throws  GeneralSecurityException thrown if an SSL initialization error
-   * happened.
-   * @throws IOException thrown if an IO error happened while reading the SSL
-   * configuration.
-   */
-  public void init() throws GeneralSecurityException, IOException {
-    keystoresFactory.init(mode);
-    context = SSLContext.getInstance("TLS");
-    context.init(keystoresFactory.getKeyManagers(),
-                 keystoresFactory.getTrustManagers(), null);
-    context.getDefaultSSLParameters().setProtocols(enabledProtocols);
-    hostnameVerifier = getHostnameVerifier(conf);
-  }
-
-  private HostnameVerifier getHostnameVerifier(HasConfig conf)
-      throws GeneralSecurityException, IOException {
-    return getHostnameVerifier(StringUtils.toUpperCase(
-        conf.getString(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").trim()));
-  }
-
-  public static HostnameVerifier getHostnameVerifier(String verifier)
-    throws GeneralSecurityException, IOException {
-    HostnameVerifier hostnameVerifier;
-    if (verifier.equals("DEFAULT")) {
-      hostnameVerifier = SSLHostnameVerifier.DEFAULT;
-    } else if (verifier.equals("DEFAULT_AND_LOCALHOST")) {
-      hostnameVerifier = SSLHostnameVerifier.DEFAULT_AND_LOCALHOST;
-    } else if (verifier.equals("STRICT")) {
-      hostnameVerifier = SSLHostnameVerifier.STRICT;
-    } else if (verifier.equals("STRICT_IE6")) {
-      hostnameVerifier = SSLHostnameVerifier.STRICT_IE6;
-    } else if (verifier.equals("ALLOW_ALL")) {
-      hostnameVerifier = SSLHostnameVerifier.ALLOW_ALL;
-    } else {
-      throw new GeneralSecurityException("Invalid hostname verifier: "
-          + verifier);
-    }
-    return hostnameVerifier;
-  }
-
-  /**
-   * Releases any resources being used.
-   */
-  public void destroy() {
-    keystoresFactory.destroy();
-  }
-
-  /**
-   * Returns the SSLFactory KeyStoresFactory instance.
-   *
-   * @return the SSLFactory KeyStoresFactory instance.
-   */
-  public KeyStoresFactory getKeystoresFactory() {
-    return keystoresFactory;
-  }
-
-  /**
-   * Returns a configured SSLEngine.
-   *
-   * @return the configured SSLEngine.
-   * @throws GeneralSecurityException thrown if the SSL engine could not
-   * be initialized.
-   * @throws IOException thrown if an IO error occurred while loading
-   * the server keystore.
-   */
-  public SSLEngine createSSLEngine()
-    throws GeneralSecurityException, IOException {
-    SSLEngine sslEngine = context.createSSLEngine();
-    if (mode == Mode.CLIENT) {
-      sslEngine.setUseClientMode(true);
-    } else {
-      sslEngine.setUseClientMode(false);
-      sslEngine.setNeedClientAuth(requireClientCert);
-    }
-    sslEngine.setEnabledProtocols(enabledProtocols);
-    return sslEngine;
-  }
-
-  /**
-   * Returns a configured SSLServerSocketFactory.
-   *
-   * @return the configured SSLServerSocketFactory.
-   * @throws GeneralSecurityException thrown if the SSLServerSocketFactory could
-   * not be initialized.
-   * @throws IOException thrown if an IO error occurred while loading
-   * the server keystore.
-   */
-  public SSLServerSocketFactory createSSLServerSocketFactory()
-    throws GeneralSecurityException, IOException {
-    if (mode != Mode.SERVER) {
-      throw new IllegalStateException("Factory is in CLIENT mode");
-    }
-    return context.getServerSocketFactory();
-  }
-
-  /**
-   * Returns a configured SSLSocketFactory.
-   *
-   * @return the configured SSLSocketFactory.
-   * @throws GeneralSecurityException thrown if the SSLSocketFactory could not
-   * be initialized.
-   * @throws IOException thrown if an IO error occurred while loading
-   * the client keystore.
-   */
-  public SSLSocketFactory createSSLSocketFactory()
-    throws GeneralSecurityException, IOException {
-    if (mode != Mode.CLIENT) {
-      throw new IllegalStateException("Factory is in CLIENT mode");
-    }
-    return context.getSocketFactory();
-  }
-
-  /**
-   * Returns the hostname verifier to be used in HttpsURLConnections.
-   *
-   * @return the hostname verifier.
-   */
-  public HostnameVerifier getHostnameVerifier() {
-    if (mode != Mode.CLIENT) {
-      throw new IllegalStateException("Factory is in CLIENT mode");
-    }
-    return hostnameVerifier;
-  }
-
-  /**
-   * Returns whether client certificates are required.
-   *
-   * @return true if client certificates are required, false otherwise.
-   */
-  public boolean isClientCertRequired() {
-    return requireClientCert;
-  }
-
-  /**
-   * If the given {@link HttpURLConnection} is an {@link HttpsURLConnection}
-   * configures the connection with the {@link SSLSocketFactory} and
-   * {@link HostnameVerifier} of this SSLFactory, otherwise does nothing.
-   *
-   * @param conn the {@link HttpURLConnection} instance to configure.
-   * @return the configured {@link HttpURLConnection} instance.
-   *
-   * @throws IOException if an IO error occurred.
-   */
-  @Override
-  public HttpURLConnection configure(HttpURLConnection conn)
-    throws IOException {
-    if (conn instanceof HttpsURLConnection) {
-      HttpsURLConnection sslConn = (HttpsURLConnection) conn;
-      try {
-        sslConn.setSSLSocketFactory(createSSLSocketFactory());
-      } catch (GeneralSecurityException ex) {
-        throw new IOException(ex);
-      }
-      sslConn.setHostnameVerifier(getHostnameVerifier());
-      conn = sslConn;
-    }
-    return conn;
-  }
-}
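
For context, a minimal client-side sketch of how this SSLFactory could be driven once initialized. The config path and URL are illustrative, and the nested Mode enum plus the org.apache.hadoop.has.common.ssl package are assumed from the Hadoop original this class borrows from:

import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.has.common.HasConfig;
import org.apache.hadoop.has.common.ssl.SSLFactory;   // package assumed from the surrounding diff

public class SslFactoryClientSketch {
    public static void main(String[] args) throws Exception {
        HasConfig conf = new HasConfig();
        // Point the factory at the client-side SSL ini file (illustrative path).
        conf.setString(SSLFactory.SSL_CLIENT_CONF_KEY, "/etc/has/ssl-client.conf");

        SSLFactory factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
        try {
            factory.init();
            HttpURLConnection conn = (HttpURLConnection)
                new URL("https://has.example.com:8443/has/v1/getkrb5conf").openConnection();
            // For HTTPS connections, configure() installs this factory's
            // SSLSocketFactory and HostnameVerifier before the request is made.
            conn = factory.configure(conn);
            System.out.println("HTTP status: " + conn.getResponseCode());
        } finally {
            factory.destroy();
        }
    }
}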

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLHostnameVerifier.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLHostnameVerifier.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLHostnameVerifier.java
deleted file mode 100644
index 86d6734..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLHostnameVerifier.java
+++ /dev/null
@@ -1,615 +0,0 @@
-/*
- * $HeadURL$
- * $Revision$
- * $Date$
- *
- * ====================================================================
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- * ====================================================================
- *
- * This software consists of voluntary contributions made by many
- * individuals on behalf of the Apache Software Foundation.  For more
- * information on the Apache Software Foundation, please see
- * <http://www.apache.org/>.
- *
- */
-
-package org.apache.hadoop.has.common.ssl;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.security.cert.Certificate;
-import java.security.cert.CertificateParsingException;
-import java.security.cert.X509Certificate;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Set;
-import java.util.StringTokenizer;
-import java.util.TreeSet;
-
-import javax.net.ssl.SSLException;
-import javax.net.ssl.SSLPeerUnverifiedException;
-import javax.net.ssl.SSLSession;
-import javax.net.ssl.SSLSocket;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.has.common.util.StringUtils;
-
-/**
- * Borrowed from Apache Hadoop.
- */
-
-/**
- ************************************************************************
- * Copied from the not-yet-commons-ssl project at
- * http://juliusdavies.ca/commons-ssl/
- * This project is not yet in Apache, but it is Apache 2.0 licensed.
- ************************************************************************
- * Interface for checking if a hostname matches the names stored inside the
- * server's X.509 certificate.  Correctly implements
- * javax.net.ssl.HostnameVerifier, but that interface is not recommended.
- * Instead we added several check() methods that take SSLSocket,
- * or X509Certificate, or ultimately (they all end up calling this one),
- * String.  (It's easier to supply JUnit with Strings instead of mock
- * SSLSession objects!)
- * </p><p>Our check() methods throw exceptions if the name is
- * invalid, whereas javax.net.ssl.HostnameVerifier just returns true/false.
- * <p/>
- * We provide the HostnameVerifier.DEFAULT, HostnameVerifier.STRICT, and
- * HostnameVerifier.ALLOW_ALL implementations.  We also provide the more
- * specialized HostnameVerifier.DEFAULT_AND_LOCALHOST, as well as
- * HostnameVerifier.STRICT_IE6.  But feel free to define your own
- * implementations!
- * <p/>
- * Inspired by Sebastian Hauer's original StrictSSLProtocolSocketFactory in the
- * HttpClient "contrib" repository.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
-
-    @Override
-    boolean verify(String host, SSLSession session);
-
-    void check(String host, SSLSocket ssl) throws IOException;
-
-    void check(String host, X509Certificate cert) throws SSLException;
-
-    void check(String host, String[] cns, String[] subjectAlts)
-        throws SSLException;
-
-    void check(String[] hosts, SSLSocket ssl) throws IOException;
-
-    void check(String[] hosts, X509Certificate cert) throws SSLException;
-
-
-    /**
-     * Checks to see if the supplied hostname matches any of the supplied CNs
-     * or "DNS" Subject-Alts.  Most implementations only look at the first CN,
-     * and ignore any additional CNs.  Most implementations do look at all of
-     * the "DNS" Subject-Alts. The CNs or Subject-Alts may contain wildcards
-     * according to RFC 2818.
-     *
-     * @param cns         CN fields, in order, as extracted from the X.509
-     *                    certificate.
-     * @param subjectAlts Subject-Alt fields of type 2 ("DNS"), as extracted
-     *                    from the X.509 certificate.
-     * @param hosts       The array of hostnames to verify.
-     * @throws SSLException If verification failed.
-     */
-    void check(String[] hosts, String[] cns, String[] subjectAlts)
-        throws SSLException;
-
-
-    /**
-     * The DEFAULT HostnameVerifier works the same way as Curl and Firefox.
-     * <p/>
-     * The hostname must match either the first CN, or any of the subject-alts.
-     * A wildcard can occur in the CN, and in any of the subject-alts.
-     * <p/>
-     * The only difference between DEFAULT and STRICT is that a wildcard (such
-     * as "*.foo.com") with DEFAULT matches all subdomains, including
-     * "a.b.foo.com".
-     */
-    SSLHostnameVerifier DEFAULT =
-        new AbstractVerifier() {
-            @Override
-            public final void check(final String[] hosts, final String[] cns,
-                                    final String[] subjectAlts)
-                throws SSLException {
-                check(hosts, cns, subjectAlts, false, false);
-            }
-
-            @Override
-            public final String toString() {
-                return "DEFAULT";
-            }
-        };
-
-
-    /**
-     * The DEFAULT_AND_LOCALHOST HostnameVerifier works like the DEFAULT
-     * one with one additional relaxation:  a host of "localhost",
-     * "localhost.localdomain", "127.0.0.1", "::1" will always pass, no matter
-     * what is in the server's certificate.
-     */
-    SSLHostnameVerifier DEFAULT_AND_LOCALHOST =
-        new AbstractVerifier() {
-            @Override
-            public final void check(final String[] hosts, final String[] cns,
-                                    final String[] subjectAlts)
-                throws SSLException {
-                if (isLocalhost(hosts[0])) {
-                    return;
-                }
-                check(hosts, cns, subjectAlts, false, false);
-            }
-
-            @Override
-            public final String toString() {
-                return "DEFAULT_AND_LOCALHOST";
-            }
-        };
-
-    /**
-     * The STRICT HostnameVerifier works the same way as java.net.URL in Sun
-     * Java 1.4, Sun Java 5, Sun Java 6.  It's also pretty close to IE6.
-     * This implementation appears to be compliant with RFC 2818 for dealing
-     * with wildcards.
-     * <p/>
-     * The hostname must match either the first CN, or any of the subject-alts.
-     * A wildcard can occur in the CN, and in any of the subject-alts.  The
-     * one divergence from IE6 is how we only check the first CN.  IE6 allows
-     * a match against any of the CNs present.  We decided to follow in
-     * Sun Java 1.4's footsteps and only check the first CN.
-     * <p/>
-     * A wildcard such as "*.foo.com" matches only subdomains in the same
-     * level, for example "a.foo.com".  It does not match deeper subdomains
-     * such as "a.b.foo.com".
-     */
-    SSLHostnameVerifier STRICT =
-        new AbstractVerifier() {
-            @Override
-            public final void check(final String[] host, final String[] cns,
-                                    final String[] subjectAlts)
-                throws SSLException {
-                check(host, cns, subjectAlts, false, true);
-            }
-
-            @Override
-            public final String toString() {
-                return "STRICT";
-            }
-        };
-
-    /**
-     * The STRICT_IE6 HostnameVerifier works just like the STRICT one with one
-     * minor variation:  the hostname can match against any of the CN's in the
-     * server's certificate, not just the first one.  This behaviour is
-     * identical to IE6's behaviour.
-     */
-    SSLHostnameVerifier STRICT_IE6 =
-        new AbstractVerifier() {
-            @Override
-            public final void check(final String[] host, final String[] cns,
-                                    final String[] subjectAlts)
-                throws SSLException {
-                check(host, cns, subjectAlts, true, true);
-            }
-
-            @Override
-            public final String toString() {
-                return "STRICT_IE6";
-            }
-        };
-
-    /**
-     * The ALLOW_ALL HostnameVerifier essentially turns hostname verification
-     * off.  This implementation is a no-op, and never throws the SSLException.
-     */
-    SSLHostnameVerifier ALLOW_ALL =
-        new AbstractVerifier() {
-            @Override
-            public final void check(final String[] host, final String[] cns,
-                                    final String[] subjectAlts) {
-                // Allow everything - so never blow up.
-            }
-
-            @Override
-            public final String toString() {
-                return "ALLOW_ALL";
-            }
-        };
-
-    abstract class AbstractVerifier implements SSLHostnameVerifier {
-
-        /**
-         * This contains a list of 2nd-level domains that aren't allowed to
-         * have wildcards when combined with country-codes.
-         * For example: [*.co.uk].
-         * <p/>
-         * The [*.co.uk] problem is an interesting one.  Should we just hope
-         * that CA's would never foolishly allow such a certificate to happen?
-         * Looks like we're the only implementation guarding against this.
-         * Firefox, Curl, Sun Java 1.4, 5, 6 don't bother with this check.
-         */
-        private static final String[] BAD_COUNTRY_2LDS =
-            {"ac", "co", "com", "ed", "edu", "go", "gouv", "gov", "info",
-                "lg", "ne", "net", "or", "org"};
-
-        private static final String[] LOCALHOSTS = {"::1", "127.0.0.1",
-            "localhost",
-            "localhost.localdomain"};
-
-
-        static {
-            // Just in case developer forgot to manually sort the array.  :-)
-            Arrays.sort(BAD_COUNTRY_2LDS);
-            Arrays.sort(LOCALHOSTS);
-        }
-
-        protected AbstractVerifier() {
-        }
-
-        /**
-         * The javax.net.ssl.HostnameVerifier contract.
-         *
-         * @param host    'hostname' we used to create our socket
-         * @param session SSLSession with the remote server
-         * @return true if the host matched the one in the certificate.
-         */
-        @Override
-        public boolean verify(String host, SSLSession session) {
-            try {
-                Certificate[] certs = session.getPeerCertificates();
-                X509Certificate x509 = (X509Certificate) certs[0];
-                check(new String[]{host}, x509);
-                return true;
-            } catch (SSLException e) {
-                return false;
-            }
-        }
-
-        @Override
-        public void check(String host, SSLSocket ssl) throws IOException {
-            check(new String[]{host}, ssl);
-        }
-
-        @Override
-        public void check(String host, X509Certificate cert)
-            throws SSLException {
-            check(new String[]{host}, cert);
-        }
-
-        @Override
-        public void check(String host, String[] cns, String[] subjectAlts)
-            throws SSLException {
-            check(new String[]{host}, cns, subjectAlts);
-        }
-
-        @Override
-        public void check(String[] host, SSLSocket ssl)
-            throws IOException {
-            if (host == null) {
-                throw new NullPointerException("host to verify is null");
-            }
-
-            SSLSession session = ssl.getSession();
-            if (session == null) {
-                // In our experience this only happens under IBM 1.4.x when
-                // spurious (unrelated) certificates show up in the server's
-                // chain.  Hopefully this will unearth the real problem:
-                InputStream in = ssl.getInputStream();
-                in.available();
-                /*
-                  If you're looking at the 2 lines of code above because
-                  you're running into a problem, you probably have two
-                  options:
-
-                    #1.  Clean up the certificate chain that your server
-                         is presenting (e.g. edit "/etc/apache2/server.crt"
-                         or wherever it is your server's certificate chain
-                         is defined).
-
-                                               OR
-
-                    #2.   Upgrade to an IBM 1.5.x or greater JVM, or switch
-                          to a non-IBM JVM.
-                */
-
-                // If ssl.getInputStream().available() didn't cause an
-                // exception, maybe at least now the session is available?
-                session = ssl.getSession();
-                if (session == null) {
-                    // If it's still null, probably a startHandshake() will
-                    // unearth the real problem.
-                    ssl.startHandshake();
-
-                    // Okay, if we still haven't managed to cause an exception,
-                    // might as well go for the NPE.  Or maybe we're okay now?
-                    session = ssl.getSession();
-                }
-            }
-            Certificate[] certs;
-            try {
-                certs = session.getPeerCertificates();
-            } catch (SSLPeerUnverifiedException spue) {
-                InputStream in = ssl.getInputStream();
-                in.available();
-                // Didn't trigger anything interesting?  Okay, just throw
-                // original.
-                throw spue;
-            }
-            X509Certificate x509 = (X509Certificate) certs[0];
-            check(host, x509);
-        }
-
-        @Override
-        public void check(String[] host, X509Certificate cert)
-            throws SSLException {
-            String[] cns = Certificates.getCNs(cert);
-            String[] subjectAlts = Certificates.getDNSSubjectAlts(cert);
-            check(host, cns, subjectAlts);
-        }
-
-        public void check(final String[] hosts, final String[] cns,
-                          final String[] subjectAlts, final boolean ie6,
-                          final boolean strictWithSubDomains)
-            throws SSLException {
-            // Build up lists of allowed hosts for logging/debugging purposes.
-            StringBuffer buf = new StringBuffer(32);
-            buf.append('<');
-            for (int i = 0; i < hosts.length; i++) {
-                String h = hosts[i];
-                h = h != null ? StringUtils.toLowerCase(h.trim()) : "";
-                hosts[i] = h;
-                if (i > 0) {
-                    buf.append('/');
-                }
-                buf.append(h);
-            }
-            buf.append('>');
-            String hostnames = buf.toString();
-            // Build the list of names we're going to check.  Our DEFAULT and
-            // STRICT implementations of the HostnameVerifier only use the
-            // first CN provided.  All other CNs are ignored.
-            // (Firefox, wget, curl, Sun Java 1.4, 5, 6 all work this way).
-            final Set<String> names = new TreeSet<String>();
-            if (cns != null && cns.length > 0 && cns[0] != null) {
-                names.add(cns[0]);
-                if (ie6) {
-                    for (int i = 1; i < cns.length; i++) {
-                        names.add(cns[i]);
-                    }
-                }
-            }
-            if (subjectAlts != null) {
-                for (int i = 0; i < subjectAlts.length; i++) {
-                    if (subjectAlts[i] != null) {
-                        names.add(subjectAlts[i]);
-                    }
-                }
-            }
-            if (names.isEmpty()) {
-                String msg = "Certificate for " + hosts[0] + " doesn't contain CN or DNS subjectAlt";
-                throw new SSLException(msg);
-            }
-
-            // StringBuffer for building the error message.
-            buf = new StringBuffer();
-
-            boolean match = false;
-            out:
-            for (Iterator<String> it = names.iterator(); it.hasNext();) {
-                // Don't trim the CN, though!
-                final String cn = StringUtils.toLowerCase(it.next());
-                // Store CN in StringBuffer in case we need to report an error.
-                buf.append(" <");
-                buf.append(cn);
-                buf.append('>');
-                if (it.hasNext()) {
-                    buf.append(" OR");
-                }
-
-                // The CN better have at least two dots if it wants wildcard
-                // action.  It also can't be [*.co.uk] or [*.co.jp] or
-                // [*.org.uk], etc...
-                boolean doWildcard = cn.startsWith("*.")
-                    && cn.lastIndexOf('.') >= 0
-                    && !isIP4Address(cn)
-                    && acceptableCountryWildcard(cn);
-
-                for (int i = 0; i < hosts.length; i++) {
-                    final String hostName =
-                        StringUtils.toLowerCase(hosts[i].trim());
-                    if (doWildcard) {
-                        match = hostName.endsWith(cn.substring(1));
-                        if (match && strictWithSubDomains) {
-                            // If we're in strict mode, then [*.foo.com] is not
-                            // allowed to match [a.b.foo.com]
-                            match = countDots(hostName) == countDots(cn);
-                        }
-                    } else {
-                        match = hostName.equals(cn);
-                    }
-                    if (match) {
-                        break out;
-                    }
-                }
-            }
-            if (!match) {
-                throw new SSLException("hostname in certificate didn't match: " + hostnames + " !=" + buf);
-            }
-        }
-
-        public static boolean isIP4Address(final String cn) {
-            boolean isIP4 = true;
-            String tld = cn;
-            int x = cn.lastIndexOf('.');
-            // We only bother analyzing the characters after the final dot
-            // in the name.
-            if (x >= 0 && x + 1 < cn.length()) {
-                tld = cn.substring(x + 1);
-            }
-            for (int i = 0; i < tld.length(); i++) {
-                if (!Character.isDigit(tld.charAt(i))) {
-                    isIP4 = false;
-                    break;
-                }
-            }
-            return isIP4;
-        }
-
-        public static boolean acceptableCountryWildcard(final String cn) {
-            int cnLen = cn.length();
-            if (cnLen >= 7 && cnLen <= 9) {
-                // Look for the '.' in the 3rd-last position:
-                if (cn.charAt(cnLen - 3) == '.') {
-                    // Trim off the [*.] and the [.XX].
-                    String s = cn.substring(2, cnLen - 3);
-                    // And test against the sorted array of bad 2lds:
-                    int x = Arrays.binarySearch(BAD_COUNTRY_2LDS, s);
-                    return x < 0;
-                }
-            }
-            return true;
-        }
-
-        public static boolean isLocalhost(String host) {
-            host = host != null ? StringUtils.toLowerCase(host.trim()) : "";
-            if (host.startsWith("::1")) {
-                int x = host.lastIndexOf('%');
-                if (x >= 0) {
-                    host = host.substring(0, x);
-                }
-            }
-            int x = Arrays.binarySearch(LOCALHOSTS, host);
-            return x >= 0;
-        }
-
-        /**
-         * Counts the number of dots "." in a string.
-         *
-         * @param s string to count dots from
-         * @return number of dots
-         */
-        public static int countDots(final String s) {
-            int count = 0;
-            for (int i = 0; i < s.length(); i++) {
-                if (s.charAt(i) == '.') {
-                    count++;
-                }
-            }
-            return count;
-        }
-    }
-
-    class Certificates {
-      public static String[] getCNs(X509Certificate cert) {
-        final List<String> cnList = new LinkedList<String>();
-        /*
-          Sebastian Hauer's original StrictSSLProtocolSocketFactory used
-          getName() and had the following comment:
-
-             Parses a X.500 distinguished name for the value of the
-             "Common Name" field.  This is done a bit sloppy right
-             now and should probably be done a bit more according to
-             <code>RFC 2253</code>.
-
-           I've noticed that toString() seems to do a better job than
-           getName() on these X500Principal objects, so I'm hoping that
-           addresses Sebastian's concern.
-
-           For example, getName() gives me this:
-           1.2.840.113549.1.9.1=#16166a756c6975736461766965734063756362632e636f6d
-
-           whereas toString() gives me this:
-           EMAILADDRESS=juliusdavies@cucbc.com
-
-           Looks like toString() even works with non-ascii domain names!
-           I tested it with "&#x82b1;&#x5b50;.co.jp" and it worked fine.
-          */
-        String subjectPrincipal = cert.getSubjectX500Principal().toString();
-        StringTokenizer st = new StringTokenizer(subjectPrincipal, ",");
-        while (st.hasMoreTokens()) {
-            String tok = st.nextToken();
-            int x = tok.indexOf("CN=");
-            if (x >= 0) {
-                cnList.add(tok.substring(x + 3));
-            }
-        }
-        if (!cnList.isEmpty()) {
-            String[] cns = new String[cnList.size()];
-            cnList.toArray(cns);
-            return cns;
-        } else {
-            return null;
-        }
-      }
-
-
-      /**
-       * Extracts the array of SubjectAlt DNS names from an X509Certificate.
-       * Returns null if there aren't any.
-       * <p/>
-       * Note:  Java doesn't appear able to extract international characters
-       * from the SubjectAlts.  It can only extract international characters
-       * from the CN field.
-       * <p/>
-       * (Or maybe the version of OpenSSL I'm using to test isn't storing the
-       * international characters correctly in the SubjectAlts?).
-       *
-       * @param cert X509Certificate
-       * @return Array of SubjectALT DNS names stored in the certificate.
-       */
-      public static String[] getDNSSubjectAlts(X509Certificate cert) {
-          final List<String> subjectAltList = new LinkedList<String>();
-          Collection<List<?>> c = null;
-          try {
-              c = cert.getSubjectAlternativeNames();
-          } catch (CertificateParsingException cpe) {
-              // Should probably log.debug() this?
-              cpe.printStackTrace();
-          }
-          if (c != null) {
-              Iterator<List<?>> it = c.iterator();
-              while (it.hasNext()) {
-                  List<?> list = it.next();
-                  int type = ((Integer) list.get(0)).intValue();
-                  // If type is 2, then we've got a dNSName
-                  if (type == 2) {
-                      String s = (String) list.get(1);
-                      subjectAltList.add(s);
-                  }
-              }
-          }
-          if (!subjectAltList.isEmpty()) {
-              String[] subjectAlts = new String[subjectAltList.size()];
-              subjectAltList.toArray(subjectAlts);
-              return subjectAlts;
-          } else {
-              return null;
-          }
-      }
-    }
-
-}
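
To make the DEFAULT/STRICT wildcard difference described above concrete, here is a small standalone sketch that exercises the check(String, String[], String[]) variant; the class and helper names are invented for illustration, only the verifier constants come from the interface:

import javax.net.ssl.SSLException;

import org.apache.hadoop.has.common.ssl.SSLHostnameVerifier;

public class HostnameVerifierSketch {
    public static void main(String[] args) {
        String[] cns = {"*.foo.com"};
        String[] subjectAlts = null;

        // DEFAULT lets a wildcard match deeper subdomains, STRICT does not.
        System.out.println(matches(SSLHostnameVerifier.DEFAULT, "a.b.foo.com", cns, subjectAlts)); // true
        System.out.println(matches(SSLHostnameVerifier.STRICT, "a.b.foo.com", cns, subjectAlts));  // false
        System.out.println(matches(SSLHostnameVerifier.STRICT, "a.foo.com", cns, subjectAlts));    // true
    }

    private static boolean matches(SSLHostnameVerifier verifier, String host,
                                   String[] cns, String[] subjectAlts) {
        try {
            verifier.check(host, cns, subjectAlts);
            return true;
        } catch (SSLException e) {
            return false;
        }
    }
}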

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/util/ConnectionConfigurator.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/ConnectionConfigurator.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/ConnectionConfigurator.java
deleted file mode 100644
index 3d5dd39..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/ConnectionConfigurator.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License. See accompanying LICENSE file.
- */
-package org.apache.hadoop.has.common.util;
-
-import java.io.IOException;
-import java.net.HttpURLConnection;
-
-/**
- * Borrowed from Apache Hadoop.
- */
-
-/**
- * Interface to configure  {@link HttpURLConnection} created by
- * {@link org.apache.hadoop.has.common.spnego.AuthenticatedURL} instances.
- */
-public interface ConnectionConfigurator {
-
-  /**
-   * Configures the given {@link HttpURLConnection} instance.
-   *
-   * @param conn the {@link HttpURLConnection} instance to configure.
-   * @return the configured {@link HttpURLConnection} instance.
-   * 
-   * @throws IOException if an IO error occurred.
-   */
-  HttpURLConnection configure(HttpURLConnection conn) throws IOException;
-
-}
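
The SSLFactory shown earlier in this commit is one implementation of this interface; a trivial non-SSL implementation, sketched here with an illustrative timeout value, simply mutates the connection and hands it back:

import java.io.IOException;
import java.net.HttpURLConnection;

import org.apache.hadoop.has.common.util.ConnectionConfigurator;

public class TimeoutConfigurator implements ConnectionConfigurator {
    private static final int TIMEOUT_MS = 60 * 1000; // illustrative value

    @Override
    public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
        // Apply connect and read timeouts before the connection is used.
        conn.setConnectTimeout(TIMEOUT_MS);
        conn.setReadTimeout(TIMEOUT_MS);
        return conn;
    }
}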


[05/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/hadoop/has/server/TestHasWebServer.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/TestHasWebServer.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/TestHasWebServer.java
deleted file mode 100644
index 148909f..0000000
--- a/has/has-server/src/test/java/org/apache/hadoop/has/server/TestHasWebServer.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server;
-
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.util.URLConnectionFactory;
-import org.apache.hadoop.has.server.web.WebConfigKey;
-import org.apache.hadoop.has.server.web.WebServer;
-import org.apache.hadoop.http.HttpConfig.Policy;
-import org.apache.hadoop.net.NetUtils;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-import java.io.File;
-import java.net.InetSocketAddress;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.Arrays;
-import java.util.Collection;
-
-@RunWith(value = Parameterized.class)
-public class TestHasWebServer {
-  private static final String KEY_STORE_DIR = TestUtil.getTempPath("keystore");
-  private static File keyStoreDir = new File(KEY_STORE_DIR);
-  private static HasConfig httpsConf;
-  private static URLConnectionFactory connectionFactory;
-
-  @Parameterized.Parameters
-  public static Collection<Object[]> policy() {
-    Object[][] params = new Object[][]{{Policy.HTTP_ONLY},
-        {Policy.HTTPS_ONLY}, {Policy.HTTP_AND_HTTPS}};
-    return Arrays.asList(params);
-  }
-
-  private final Policy policy;
-
-  public TestHasWebServer(Policy policy) {
-    super();
-    this.policy = policy;
-  }
-
-  @BeforeClass
-  public static void setUp() throws Exception {
-    httpsConf = new HasConfig();
-    // Create test keystore dir.
-    if (!keyStoreDir.exists()) {
-      if (!keyStoreDir.mkdirs()) {
-        System.err.println("Failed to create keystore-dir.");
-        System.exit(3);
-      }
-    }
-    String sslConfDir = TestUtil.getClasspathDir(TestRestApiBase.class);
-    TestUtil.setupSSLConfig(KEY_STORE_DIR, sslConfDir, httpsConf, false);
-    connectionFactory = URLConnectionFactory.newDefaultURLConnectionFactory(httpsConf);
-  }
-
-  @AfterClass
-  public static void tearDown() throws Exception {
-    FileUtil.fullyDelete(keyStoreDir);
-  }
-
-  @Test
-  public void testHttpPolicy() throws Exception {
-    httpsConf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY, policy.name());
-    httpsConf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY, "localhost:11236");
-    httpsConf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY, "localhost:19278");
-    httpsConf.setString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE, "simple");
-
-    WebServer server = null;
-    try {
-      server = new WebServer(httpsConf);
-      server.start();
-
-      Assert.assertTrue(implies(policy.isHttpEnabled(),
-          canAccess("http", server.getHttpAddress())));
-      Assert.assertTrue(implies(!policy.isHttpEnabled(),
-          server.getHttpAddress() == null));
-
-      Assert.assertTrue(implies(policy.isHttpsEnabled(),
-          canAccess("https", server.getHttpsAddress())));
-      Assert.assertTrue(implies(!policy.isHttpsEnabled(),
-          server.getHttpsAddress() == null));
-    } finally {
-      if (server != null) {
-        server.stop();
-      }
-    }
-  }
-
-  private static boolean canAccess(String scheme, InetSocketAddress address) {
-    if (address == null) {
-      return false;
-    }
-    try {
-      URL url = new URL(scheme + "://" + NetUtils.getHostPortString(address));
-      URLConnection conn = connectionFactory.openConnection(url);
-      conn.connect();
-      conn.getContent();
-    } catch (Exception e) {
-      return false;
-    }
-    return true;
-  }
-
-  private static boolean implies(boolean a, boolean b) {
-    return !a || b;
-  }
-}
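
The assertions in testHttpPolicy() read as logical implications: if the policy enables a scheme its address must answer, and if it disables a scheme the server must not publish an address for it at all. A tiny standalone sketch of that pattern, with illustrative values:

public class PolicyImplicationSketch {
    // Same helper as in the test: "a implies b" is false only when a is true and b is false.
    static boolean implies(boolean a, boolean b) {
        return !a || b;
    }

    public static void main(String[] args) {
        boolean httpEnabled = true;    // e.g. Policy.HTTP_AND_HTTPS (illustrative)
        boolean httpReachable = true;  // what canAccess("http", address) would report
        boolean addressIsNull = false; // whether getHttpAddress() returned null

        // Both checks must hold for the test to pass.
        System.out.println(implies(httpEnabled, httpReachable));  // enabled scheme must be reachable
        System.out.println(implies(!httpEnabled, addressIsNull)); // disabled scheme must have no address
    }
}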

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/hadoop/has/server/TestRestApiBase.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/TestRestApiBase.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/TestRestApiBase.java
deleted file mode 100644
index e95382c..0000000
--- a/has/has-server/src/test/java/org/apache/hadoop/has/server/TestRestApiBase.java
+++ /dev/null
@@ -1,336 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server;
-
-import com.sun.jersey.api.client.Client;
-import com.sun.jersey.api.client.ClientResponse;
-import com.sun.jersey.api.client.WebResource;
-import com.sun.jersey.api.client.config.ClientConfig;
-import com.sun.jersey.api.client.config.DefaultClientConfig;
-import com.sun.jersey.client.urlconnection.HTTPSProperties;
-import com.sun.jersey.core.util.MultivaluedMapImpl;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasConfigKey;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.spnego.AuthenticationException;
-import org.apache.hadoop.has.common.util.URLConnectionFactory;
-import org.apache.hadoop.has.server.web.WebConfigKey;
-import org.apache.hadoop.has.server.web.WebServer;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.security.ssl.SSLFactory;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.glassfish.jersey.SslConfigurator;
-import org.junit.After;
-import org.junit.Before;
-
-import javax.net.ssl.HostnameVerifier;
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.SSLSession;
-import javax.ws.rs.core.MultivaluedMap;
-import java.io.*;
-import java.net.HttpURLConnection;
-import java.net.MalformedURLException;
-import java.net.URL;
-
-import static org.junit.Assert.assertEquals;
-
-public class TestRestApiBase {
-    private static String address;
-    protected static File testDir = new File(System.getProperty("test.dir", "target"));
-    private static File testClassDir = new File(testDir, "test-classes");
-    private static File confDir = new File(testClassDir, "conf");
-    private static File workDir = new File(testDir, "work-dir");
-    private static HasServer server = null;
-    private static final String KEY_STORE_DIR = TestUtil.getTempPath("keystore");
-    private static File keyStoreDir = new File(KEY_STORE_DIR);
-    private static HasConfig httpsConf;
-
-    @Before
-    public void startHasServer() throws Exception {
-        // Create test keystoreDir and workDir.
-        if (!keyStoreDir.exists()) {
-            if (!keyStoreDir.mkdirs()) {
-                System.err.println("Failed to create keystore-dir.");
-                System.exit(3);
-            }
-        }
-
-        if (!workDir.exists()) {
-            if (!workDir.mkdirs()) {
-                System.err.println("Failed to create work-dir.");
-                System.exit(3);
-            }
-        }
-
-        // Configure test HAS server.
-        httpsConf = new HasConfig();
-        String sslConfDir = TestUtil.getClasspathDir(TestRestApiBase.class);
-        TestUtil.setupSSLConfig(KEY_STORE_DIR, sslConfDir, httpsConf, false);
-        httpsConf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
-        httpsConf.setString(HasConfigKey.FILTER_AUTH_TYPE, "simple");
-
-        // Start test HAS server.
-        int httpsPort = 10000 + (int) (System.currentTimeMillis() % 10000); // Generate test port randomly
-        String host = "localhost";
-        address = host + ":" + httpsPort;
-        httpsConf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY, address);
-
-        server = new HasServer(confDir);
-        server.setWebServer(new WebServer(httpsConf));
-        server.setWorkDir(workDir);
-        try {
-            server.startWebServer();
-        } catch (HasException e) {
-            System.err.println("Errors occurred when start HAS server: " + e.toString());
-            System.exit(6);
-        }
-    }
-
-    @After
-    public void stopHasServer() {
-        server.stopWebServer();
-        if (keyStoreDir.exists()) {
-            FileUtil.fullyDelete(keyStoreDir);
-        }
-        if (workDir.exists()) {
-            FileUtil.fullyDelete(workDir);
-        }
-    }
-
-    private void startKdc() {
-        WebResource webResource = getWebResource("kdcstart");
-        String response = webResource.get(String.class);
-        try {
-            JSONObject result = new JSONObject(response);
-            if (!result.getString("result").equals("success")) {
-                System.err.println("Errors occurred when start HAS KDC server.");
-                System.exit(6);
-            }
-        } catch (JSONException e) {
-            System.err.println("Errors occurred when start HAS KDC server. " + e.toString());
-            System.exit(6);
-        }
-    }
-
-    protected WebResource getWebResource(String restName) {
-        String apiUrl = "https://" + address + "/has/v1/" + restName;
-        HasConfig clientConf = new HasConfig();
-        try {
-            clientConf.addIniConfig(new File(httpsConf.getString(SSLFactory.SSL_CLIENT_CONF_KEY)));
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-        SslConfigurator sslConfigurator = SslConfigurator.newInstance()
-            .trustStoreFile(clientConf.getString("ssl.client.truststore.location"))
-            .trustStorePassword(clientConf.getString("ssl.client.truststore.password"));
-        sslConfigurator.securityProtocol("SSL");
-        SSLContext sslContext = sslConfigurator.createSSLContext();
-        ClientConfig clientConfig = new DefaultClientConfig();
-        clientConfig.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES,
-            new HTTPSProperties(new HostnameVerifier() {
-                @Override
-                public boolean verify(String s, SSLSession sslSession) {
-                    return false;
-                }
-            }, sslContext));
-        Client client = Client.create(clientConfig);
-        return client.resource(apiUrl);
-    }
-
-    protected void getKrb5Conf() {
-        WebResource webResource = getWebResource("getkrb5conf");
-        ClientResponse response = webResource.get(ClientResponse.class);
-        assertEquals(200, response.getStatus());
-    }
-
-    protected void getHasConf() {
-        WebResource webResource = getWebResource("gethasconf");
-        ClientResponse response = webResource.get(ClientResponse.class);
-        assertEquals(200, response.getStatus());
-        File hasConf = new File(confDir, "has-client.conf");
-        if (hasConf.exists()) {
-            if (!hasConf.delete()) {
-                System.err.println("Failed to delete has-client.conf.");
-            }
-        }
-    }
-
-    protected void kdcStart() {
-        WebResource webResource = getWebResource("kdcstart");
-        String response = webResource.get(String.class);
-        try {
-            JSONObject result = new JSONObject(response);
-            assertEquals("success", result.getString("result"));
-        } catch (JSONException e) {
-            System.err.println("Failed to start HAS KDC server. " + e.toString());
-            System.exit(6);
-        }
-    }
-
-    protected void kdcInit() {
-        startKdc();
-        WebResource webResource = getWebResource("kdcinit");
-        ClientResponse response = webResource.get(ClientResponse.class);
-        assertEquals(200, response.getStatus());
-    }
-
-    protected void createPrincipals() {
-        String webServerUrl = "https://" + address + "/has/v1/";
-        startKdc();
-
-        // Create test host roles json object.
-        JSONObject hostRoles = new JSONObject();
-        try {
-            JSONObject host1 = new JSONObject();
-            host1.put("name", "host1");
-            host1.put("hostRoles", "HDFS,YARN");
-            JSONObject host2 = new JSONObject();
-            host2.put("name", "host2");
-            host2.put("hostRoles", "ZOOKEEPER,HBASE");
-            JSONArray hosts = new JSONArray();
-            hosts.put(host1);
-            hosts.put(host2);
-            hostRoles.put("HOSTS", hosts);
-        } catch (JSONException e) {
-            System.err.println("Failed to create test host roles json object. " + e.toString());
-            System.exit(6);
-        }
-
-        try {
-            URL url = null;
-            try {
-                url = new URL(webServerUrl + "admin/createprincipals");
-            } catch (MalformedURLException e) {
-                e.printStackTrace();
-            }
-
-            URLConnectionFactory connectionFactory = URLConnectionFactory.newDefaultURLConnectionFactory(httpsConf);
-            HttpURLConnection httpConn = (HttpURLConnection) connectionFactory.openConnection(url, false, httpsConf);
-            httpConn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
-            httpConn.setRequestMethod("PUT");
-            httpConn.setDoOutput(true);
-            httpConn.setDoInput(true);
-            httpConn.connect();
-
-            OutputStream out = httpConn.getOutputStream();
-            out.write(hostRoles.toString().getBytes());
-            out.flush();
-            out.close();
-
-            assertEquals(200, httpConn.getResponseCode());
-            BufferedReader reader = httpConn.getResponseCode()
-                == HttpURLConnection.HTTP_OK ? new BufferedReader(
-                new InputStreamReader(httpConn.getInputStream(),
-                    "UTF-8")) : new BufferedReader(
-                new InputStreamReader(httpConn.getErrorStream(),
-                    "UTF-8"));
-
-            String response = reader.readLine();
-            JSONObject result = new JSONObject(response);
-            assertEquals("success", result.getString("result"));
-        } catch (JSONException | IOException | AuthenticationException e) {
-            System.err.println("Failed to create principals by hostRoles. " + e.toString());
-            System.exit(6);
-        }
-    }
-
-    protected void exportKeytabs() {
-        startKdc();
-        WebResource webResource = getWebResource("admin/exportkeytabs");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("host", "host1");
-        params.add("role", "HDFS");
-        ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
-        assertEquals(200, response.getStatus());
-    }
-
-    protected void exportKeytab() {
-        startKdc();
-        WebResource webResource = getWebResource("admin/exportkeytab");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("principal", "admin@HADOOP.COM");
-        ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
-        assertEquals(200, response.getStatus());
-    }
-
-    protected void addPrincipal() {
-        startKdc();
-        WebResource webResource = getWebResource("admin/addprincipal");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("principal", "admin");
-        params.add("password", "123");
-        String response = webResource.queryParams(params).post(String.class);
-        try {
-            JSONObject result = new JSONObject(response);
-            assertEquals("success", result.getString("result"));
-        } catch (JSONException e) {
-            System.err.println("Failed to add principal. " + e.toString());
-            System.exit(6);
-        }
-    }
-
-    protected void getPrincipals() {
-        startKdc();
-        WebResource webResource = getWebResource("admin/getprincipals");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        String response = webResource.queryParams(params).get(String.class);
-        try {
-            JSONObject result = new JSONObject(response);
-            assertEquals("success", result.getString("result"));
-        } catch (JSONException e) {
-            System.err.println("Failed to get principals. " + e.toString());
-            System.exit(6);
-        }
-    }
-
-    protected void renamePrincipal() {
-        startKdc();
-        WebResource webResource = getWebResource("admin/renameprincipal");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("oldprincipal", "admin");
-        params.add("newprincipal", "admin2");
-        String response = webResource.queryParams(params).post(String.class);
-        try {
-            JSONObject result = new JSONObject(response);
-            assertEquals("success", result.getString("result"));
-        } catch (JSONException e) {
-            System.err.println("Failed to rename principal. " + e.toString());
-            System.exit(6);
-        }
-    }
-
-    protected void deletePrincipal() {
-        startKdc();
-        WebResource webResource = getWebResource("admin/deleteprincipal");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("principal", "admin2");
-        String response = webResource.queryParams(params).delete(String.class);
-        try {
-            JSONObject result = new JSONObject(response);
-            assertEquals("success", result.getString("result"));
-        } catch (JSONException e) {
-            System.err.println("Failed to delete principal. " + e.toString());
-            System.exit(6);
-        }
-    }
-}
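
TestRestApiBase only provides the server lifecycle and the per-endpoint helpers; a concrete test class would extend it and sequence those calls. A hypothetical subclass (not part of this diff, assumed to live in the same org.apache.hadoop.has.server package) might look like:

import org.junit.Test;

public class TestRestApiSketch extends TestRestApiBase {

    @Test
    public void testPrincipalLifecycle() {
        kdcStart();
        addPrincipal();      // creates "admin" with password "123"
        getPrincipals();
        renamePrincipal();   // renames "admin" to "admin2"
        deletePrincipal();   // removes "admin2"
    }
}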

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/hadoop/has/server/TestUtil.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/TestUtil.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/TestUtil.java
deleted file mode 100644
index 1ae1a64..0000000
--- a/has/has-server/src/test/java/org/apache/hadoop/has/server/TestUtil.java
+++ /dev/null
@@ -1,368 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server;
-
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.server.web.WebConfigKey;
-import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory;
-import org.apache.hadoop.security.ssl.SSLFactory;
-import org.bouncycastle.x509.X509V1CertificateGenerator;
-
-import javax.security.auth.x500.X500Principal;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.math.BigInteger;
-import java.net.URL;
-import java.security.GeneralSecurityException;
-import java.security.InvalidKeyException;
-import java.security.Key;
-import java.security.KeyPair;
-import java.security.KeyPairGenerator;
-import java.security.KeyStore;
-import java.security.NoSuchAlgorithmException;
-import java.security.NoSuchProviderException;
-import java.security.SecureRandom;
-import java.security.SignatureException;
-import java.security.cert.Certificate;
-import java.security.cert.CertificateEncodingException;
-import java.security.cert.X509Certificate;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-class TestUtil {
-
-  /**
-   * system property for test data: {@value}
-   */
-  private static final String SYSPROP_TEST_DATA_DIR = "test.build.data";
-
-  /**
-   * The default path for using in Hadoop path references: {@value}
-   */
-  private static final String DEFAULT_TEST_DATA_PATH = "target/";
-
-  /**
-   * Get a temp path. This may or may not be relative; it depends on what the
-   * {@link #SYSPROP_TEST_DATA_DIR} is set to. If unset, it returns a path
-   * under the relative path {@link #DEFAULT_TEST_DATA_PATH}
-   *
-   * @param subPath sub path, with no leading "/" character
-   * @return a string to use in paths
-   */
-  public static String getTempPath(String subPath) {
-    String prop = System.getProperty(SYSPROP_TEST_DATA_DIR, DEFAULT_TEST_DATA_PATH);
-    if (prop.isEmpty()) {
-      // corner case: property is there but empty
-      prop = DEFAULT_TEST_DATA_PATH;
-    }
-    if (!prop.endsWith("/")) {
-      prop = prop + "/";
-    }
-    return prop + subPath;
-  }
-
-  public static String getClasspathDir(Class testClass) throws Exception {
-    String file = testClass.getName();
-    file = file.replace('.', '/') + ".class";
-    URL url = Thread.currentThread().getContextClassLoader().getResource(file);
-    String baseDir = url.toURI().getPath();
-    baseDir = baseDir.substring(0, baseDir.length() - file.length() - 1);
-    return baseDir;
-  }
-
-  @SuppressWarnings("deprecation")
-  /*
-   * Create a self-signed X.509 Certificate.
-   *
-   * @param dn the X.509 Distinguished Name, eg "CN=Test, L=London, C=GB"
-   * @param pair the KeyPair
-   * @param days how many days from now the Certificate is valid for
-   * @param algorithm the signing algorithm, eg "SHA1withRSA"
-   * @return the self-signed certificate
-   */
-  private static X509Certificate generateCertificate(String dn, KeyPair pair, int days, String algorithm)
-      throws CertificateEncodingException, InvalidKeyException, IllegalStateException,
-      NoSuchProviderException, NoSuchAlgorithmException, SignatureException {
-
-    Date from = new Date();
-    Date to = new Date(from.getTime() + days * 86400000L);
-    BigInteger sn = new BigInteger(64, new SecureRandom());
-    X509V1CertificateGenerator certGen = new X509V1CertificateGenerator();
-    X500Principal dnName = new X500Principal(dn);
-
-    certGen.setSerialNumber(sn);
-    certGen.setIssuerDN(dnName);
-    certGen.setNotBefore(from);
-    certGen.setNotAfter(to);
-    certGen.setSubjectDN(dnName);
-    certGen.setPublicKey(pair.getPublic());
-    certGen.setSignatureAlgorithm(algorithm);
-
-    return certGen.generate(pair.getPrivate());
-  }
-
-  private static KeyPair generateKeyPair(String algorithm) throws NoSuchAlgorithmException {
-    KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm);
-    keyGen.initialize(1024);
-    return keyGen.genKeyPair();
-  }
-
-  private static KeyStore createEmptyKeyStore() throws GeneralSecurityException, IOException {
-    KeyStore ks = KeyStore.getInstance("JKS");
-    ks.load(null, null); // initialize
-    return ks;
-  }
-
-  private static void saveKeyStore(KeyStore ks, String filename, String password)
-      throws GeneralSecurityException, IOException {
-    FileOutputStream out = new FileOutputStream(filename);
-    ks.store(out, password.toCharArray());
-    out.close();
-  }
-
-  private static void createKeyStore(String filename, String password, String alias, Key privateKey, Certificate cert)
-      throws GeneralSecurityException, IOException {
-    KeyStore ks = createEmptyKeyStore();
-    ks.setKeyEntry(alias, privateKey, password.toCharArray(), new Certificate[]{cert});
-    saveKeyStore(ks, filename, password);
-  }
-
-  private static <T extends Certificate> void createTrustStore(String filename, String password, Map<String, T> certs)
-      throws GeneralSecurityException, IOException {
-    KeyStore ks = createEmptyKeyStore();
-    for (Map.Entry<String, T> cert : certs.entrySet()) {
-      ks.setCertificateEntry(cert.getKey(), cert.getValue());
-    }
-    saveKeyStore(ks, filename, password);
-  }
-
-  /**
-   * Performs complete setup of SSL configuration in preparation for testing an
-   * SSLFactory.  This includes keys, certs, keystore, truststore, the server
-   * SSL configuration file, the client SSL configuration file, and the master
-   * configuration file read by the SSLFactory.
-   *
-   * @param keystoreDir   String directory to save keystore
-   * @param sslConfDir    String directory to save SSL configuration files
-   * @param conf          Configuration master configuration to be used by an SSLFactory,
-   *                      which will be mutated by this method
-   * @param useClientCert boolean true to make the client present a cert in the SSL handshake
-   */
-  public static void setupSSLConfig(String keystoreDir, String sslConfDir, HasConfig conf, boolean useClientCert)
-      throws Exception {
-    setupSSLConfig(keystoreDir, sslConfDir, conf, useClientCert, true, "");
-  }
-
-  /**
-   * Performs complete setup of SSL configuration in preparation for testing an
-   * SSLFactory.  This includes keys, certs, keystore, truststore, the server
-   * SSL configuration file, the client SSL configuration file, and the master
-   * configuration file read by the SSLFactory.
-   *
-   * @param keystoreDir   String directory to save keystore
-   * @param sslConfDir    String directory to save SSL configuration files
-   * @param conf          Configuration master configuration to be used by an SSLFactory,
-   *                      which will be mutated by this method
-   * @param useClientCert boolean true to make the client present a cert in the SSL handshake
-   * @param trustStore    boolean true to create truststore, false not to create it
-   * @param excludeCiphers String comma separated ciphers to exclude
-   * @throws Exception e
-   */
-  private static void setupSSLConfig(String keystoreDir, String sslConfDir, HasConfig conf, boolean useClientCert,
-                                     boolean trustStore, String excludeCiphers) throws Exception {
-    String clientKS = keystoreDir + "/clientKS.jks";
-    String clientPassword = "clientP";
-    String serverKS = keystoreDir + "/serverKS.jks";
-    String serverPassword = "serverP";
-    String trustKS = null;
-    String trustPassword = "trustP";
-
-    File sslClientConfFile = new File(sslConfDir, getClientSSLConfigFileName());
-    File sslServerConfFile = new File(sslConfDir, getServerSSLConfigFileName());
-
-    Map<String, X509Certificate> certs = new HashMap<String, X509Certificate>();
-
-    if (useClientCert) {
-      KeyPair cKP = TestUtil.generateKeyPair("RSA");
-      X509Certificate cCert = TestUtil.generateCertificate("CN=localhost, O=client", cKP, 30, "SHA1withRSA");
-      TestUtil.createKeyStore(clientKS, clientPassword, "client", cKP.getPrivate(), cCert);
-      certs.put("client", cCert);
-    }
-
-    KeyPair sKP = TestUtil.generateKeyPair("RSA");
-    X509Certificate sCert = TestUtil.generateCertificate("CN=localhost, O=server", sKP, 30, "SHA1withRSA");
-    TestUtil.createKeyStore(serverKS, serverPassword, "server", sKP.getPrivate(), sCert);
-    certs.put("server", sCert);
-
-    if (trustStore) {
-      trustKS = keystoreDir + "/trustKS.jks";
-      TestUtil.createTrustStore(trustKS, trustPassword, certs);
-    }
-
-    HasConfig clientSSLConf = createClientSSLConfig(clientKS, clientPassword, clientPassword, trustKS, excludeCiphers);
-    HasConfig serverSSLConf = createServerSSLConfig(serverKS, serverPassword, serverPassword, trustKS, excludeCiphers);
-
-    saveConfig(sslClientConfFile, clientSSLConf);
-    saveConfig(sslServerConfFile, serverSSLConf);
-
-    conf.setString(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
-    conf.setString(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConfFile.getCanonicalPath());
-    conf.setString(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile.getCanonicalPath());
-    conf.setString(WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, sslServerConfFile.getAbsolutePath());
-    conf.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, useClientCert);
-  }
-
-  /**
-   * Create SSL configuration for a client.
-   *
-   * @param clientKS       String client keystore file
-   * @param password       String store password, or null to avoid setting store password
-   * @param keyPassword    String key password, or null to avoid setting key password
-   * @param trustKS        String truststore file
-   * @param excludeCiphers String comma separated ciphers to exclude
-   * @return Configuration for client SSL
-   */
-  private static HasConfig createClientSSLConfig(String clientKS, String password, String keyPassword,
-                                                 String trustKS, String excludeCiphers) {
-    return createSSLConfig(SSLFactory.Mode.CLIENT, clientKS, password, keyPassword, trustKS, excludeCiphers);
-  }
-
-  /**
-   * Creates SSL configuration for a server.
-   *
-   * @param serverKS       String server keystore file
-   * @param password       String store password, or null to avoid setting store password
-   * @param keyPassword    String key password, or null to avoid setting key password
-   * @param trustKS        String truststore file
-   * @param excludeCiphers String comma separated ciphers to exclude
-   * @return HasConfig
-   * @throws IOException e
-   */
-  private static HasConfig createServerSSLConfig(String serverKS, String password, String keyPassword,
-                                                 String trustKS, String excludeCiphers) throws IOException {
-    return createSSLConfig(SSLFactory.Mode.SERVER, serverKS, password, keyPassword, trustKS, excludeCiphers);
-  }
-
-  /**
-   * Returns the client SSL configuration file name.  Under parallel test
-   * execution, this file name is parametrized by a unique ID to ensure that
-   * concurrent tests don't collide on an SSL configuration file.
-   *
-   * @return client SSL configuration file name
-   */
-  private static String getClientSSLConfigFileName() {
-    return getSSLConfigFileName("ssl-client");
-  }
-
-  /**
-   * Returns the server SSL configuration file name.  Under parallel test
-   * execution, this file name is parametrized by a unique ID to ensure that
-   * concurrent tests don't collide on an SSL configuration file.
-   *
-   * @return client SSL configuration file name
-   */
-  private static String getServerSSLConfigFileName() {
-    return getSSLConfigFileName("ssl-server");
-  }
-
-  /**
-   * Returns an SSL configuration file name.  Under parallel test
-   * execution, this file name is parametrized by a unique ID to ensure that
-   * concurrent tests don't collide on an SSL configuration file.
-   *
-   * @param base the base of the file name
-   * @return SSL configuration file name for base
-   */
-  private static String getSSLConfigFileName(String base) {
-    String testUniqueForkId = System.getProperty("test.unique.fork.id");
-    String fileSuffix = testUniqueForkId != null ? "-" + testUniqueForkId : "";
-    return base + fileSuffix + ".xml";
-  }
-
-  /**
-   * Creates SSL configuration.
-   *
-   * @param mode        SSLFactory.Mode mode to configure
-   * @param keystore    String keystore file
-   * @param password    String store password, or null to avoid setting store password
-   * @param keyPassword String key password, or null to avoid setting key password
-   * @param trustKS     String truststore file
-   * @return Configuration for SSL
-   */
-  private static HasConfig createSSLConfig(SSLFactory.Mode mode, String keystore, String password,
-                                           String keyPassword, String trustKS, String excludeCiphers) {
-    String trustPassword = "trustP";
-
-    HasConfig sslConf = new HasConfig();
-    if (keystore != null) {
-      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-          FileBasedKeyStoresFactory.SSL_KEYSTORE_LOCATION_TPL_KEY), keystore);
-    }
-    if (password != null) {
-      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-          FileBasedKeyStoresFactory.SSL_KEYSTORE_PASSWORD_TPL_KEY), password);
-    }
-    if (keyPassword != null) {
-      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-          FileBasedKeyStoresFactory.SSL_KEYSTORE_KEYPASSWORD_TPL_KEY),
-          keyPassword);
-    }
-    if (trustKS != null) {
-      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-          FileBasedKeyStoresFactory.SSL_TRUSTSTORE_LOCATION_TPL_KEY), trustKS);
-    }
-    if (trustPassword != null) {
-      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-          FileBasedKeyStoresFactory.SSL_TRUSTSTORE_PASSWORD_TPL_KEY),
-          trustPassword);
-    }
-    if (null != excludeCiphers && !excludeCiphers.isEmpty()) {
-      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-          FileBasedKeyStoresFactory.SSL_EXCLUDE_CIPHER_LIST),
-          excludeCiphers);
-    }
-    sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
-        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY), "1000");
-
-    return sslConf;
-  }
-
-  /**
-   * Saves configuration to a file.
-   *
-   * @param file File to save
-   * @param conf Configuration contents to write to file
-   * @throws IOException if there is an I/O error saving the file
-   */
-  private static void saveConfig(File file, HasConfig conf) throws IOException {
-    OutputStream output = new FileOutputStream(file);
-    Properties prop = new Properties();
-
-    // set the properties value
-    for (String name : conf.getNames()) {
-      prop.setProperty(name, conf.getString(name));
-    }
-
-    // save properties to project root folder
-    prop.store(output, null);
-  }
-}
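
For context on the createSSLConfig(...) helper above: it resolves per-mode property names through Hadoop's FileBasedKeyStoresFactory. Assuming that class keeps its usual "ssl.{0}.*" template constants (an assumption about Hadoop, not something this patch changes), a client-mode lookup works out as in this minimal sketch:

    // Assumption: SSL_TRUSTSTORE_LOCATION_TPL_KEY = "ssl.{0}.truststore.location"
    String key = FileBasedKeyStoresFactory.resolvePropertyName(
        SSLFactory.Mode.CLIENT,
        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_LOCATION_TPL_KEY);
    // key -> "ssl.client.truststore.location"

That resolved name is the same "ssl.client.truststore.location" key the relocated TestRestApiBase reads back from the generated ssl-client.xml further down in this patch.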

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonConfApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonConfApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonConfApi.java
deleted file mode 100644
index 1f7b443..0000000
--- a/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonConfApi.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.json;
-
-import com.sun.jersey.api.client.WebResource;
-import com.sun.jersey.core.util.MultivaluedMapImpl;
-import org.apache.hadoop.has.server.TestRestApiBase;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
-
-import java.io.File;
-import java.io.IOException;
-import javax.ws.rs.core.MultivaluedMap;
-
-import static org.junit.Assert.assertEquals;
-
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class TestJsonConfApi extends TestRestApiBase {
-
-    @Test
-    public void testSetPlugin() {
-        WebResource webResource = getWebResource("conf/setplugin");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("plugin", "RAM");
-        String response = webResource.queryParams(params).put(String.class);
-        assertEquals("HAS plugin set successfully.\n", response);
-    }
-
-    @Test
-    public void testConfigKdcBackend() {
-        WebResource webResource = getWebResource("conf/configkdcbackend");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("backendType", "json");
-        String backend = null;
-        try {
-            backend = new File(testDir, "json-backend").getCanonicalPath();
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-        params.add("dir", backend);
-        String response = webResource.queryParams(params).put(String.class);
-        assertEquals("Json backend set successfully.\n", response);
-    }
-
-    @Test
-    public void testConfigXJsonKdc() {
-        WebResource webResource = getWebResource("conf/configkdc");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("realm", "HADOOP.COM");
-        params.add("host", "localhost");
-        params.add("port", "8866");
-        String response = webResource.queryParams(params).put(String.class);
-        assertEquals("HAS server KDC set successfully.\n", response);
-    }
-
-    @Test
-    public void testGetKrb5Conf() {
-        getKrb5Conf();
-    }
-
-    @Test
-    public void testGetHasConf() {
-        getHasConf();
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHadminApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHadminApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHadminApi.java
deleted file mode 100644
index 412a8a1..0000000
--- a/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHadminApi.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.json;
-
-import com.sun.jersey.api.client.ClientResponse;
-import com.sun.jersey.api.client.WebResource;
-import com.sun.jersey.core.util.MultivaluedMapImpl;
-import org.apache.hadoop.has.server.TestRestApiBase;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
-
-import javax.ws.rs.core.MultivaluedMap;
-
-import static org.junit.Assert.assertEquals;
-
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class TestJsonHadminApi extends TestRestApiBase {
-
-    @Test
-    public void testCreatePrincipals() {
-        createPrincipals();
-    }
-
-    @Test
-    public void testExportKeytabs() {
-        exportKeytabs();
-    }
-
-    @Test
-    public void testExportKeytab() {
-        exportKeytab();
-    }
-
-    @Test
-    public void testAddPrincipal() {
-        addPrincipal();
-    }
-
-    @Test
-    public void testGetPrincipals() {
-        getPrincipals();
-    }
-
-    @Test
-    public void testRenamePrincipal() {
-        renamePrincipal();
-    }
-
-    @Test
-    public void testXDeletePrincipal() {
-        deletePrincipal();
-    }
-
-    @Test
-    public void testSetConf() {
-        WebResource webResource = getWebResource("admin/setconf");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("isEnable", "true");
-        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
-        assertEquals(200, response.getStatus());
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHasApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHasApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHasApi.java
deleted file mode 100644
index bd72448..0000000
--- a/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHasApi.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.json;
-
-import com.sun.jersey.api.client.WebResource;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.has.server.TestRestApiBase;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
-
-import java.io.File;
-
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class TestJsonHasApi extends TestRestApiBase {
-
-    @Test
-    public void hostRoles() {
-        WebResource webResource = getWebResource("hostroles");
-        String response = webResource.get(String.class);
-        System.out.println(response);
-    }
-
-    @Test
-    public void testKdcStart() {
-        kdcStart();
-        File backendDir = new File(testDir, "json-backend");
-        if (backendDir.exists()) {
-            FileUtil.fullyDelete(backendDir);
-        }
-    }
-
-    @Test
-    public void testKdcInit() {
-        kdcInit();
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLConfApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLConfApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLConfApi.java
deleted file mode 100644
index 6dc240d..0000000
--- a/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLConfApi.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.mysql;
-
-import com.sun.jersey.api.client.WebResource;
-import com.sun.jersey.core.util.MultivaluedMapImpl;
-import org.apache.hadoop.has.server.TestRestApiBase;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
-
-import javax.ws.rs.core.MultivaluedMap;
-import java.io.IOException;
-
-import static org.junit.Assert.assertEquals;
-
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class TestMySQLConfApi extends TestRestApiBase {
-
-    @Test
-    public void testConfigKdcBackend() throws IOException {
-        WebResource webResource = getWebResource("conf/configkdcbackend");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("backendType", "mysql");
-        params.add("driver", "org.h2.Driver");
-        params.add("url", "jdbc:h2:" + testDir.getCanonicalPath() + "/mysql-backend/mysqlbackend;MODE=MySQL");
-        params.add("user", "root");
-        params.add("password", "123456");
-        String response = webResource.queryParams(params).put(String.class);
-        assertEquals("MySQL backend set successfully.\n", response);
-    }
-
-    @Test
-    public void testConfigMySQLKdc() {
-        WebResource webResource = getWebResource("conf/configkdc");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("realm", "HADOOP.COM");
-        params.add("host", "localhost");
-        params.add("port", "8899");
-        String response = webResource.queryParams(params).put(String.class);
-        assertEquals("HAS server KDC set successfully.\n", response);
-    }
-
-    @Test
-    public void testGetKrb5Conf() {
-        getKrb5Conf();
-    }
-
-    @Test
-    public void testGetHasConf() {
-        getHasConf();
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHadminApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHadminApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHadminApi.java
deleted file mode 100644
index 8adb625..0000000
--- a/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHadminApi.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.mysql;
-
-import org.apache.hadoop.has.server.TestRestApiBase;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
-
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class TestMySQLHadminApi extends TestRestApiBase {
-
-    @Test
-    public void testCreatePrincipals() {
-        createPrincipals();
-    }
-
-    @Test
-    public void testExportKeytabs() {
-        exportKeytabs();
-    }
-
-    @Test
-    public void testExportKeytab() {
-        exportKeytab();
-    }
-
-    @Test
-    public void testAddPrincipal() {
-        addPrincipal();
-    }
-
-    @Test
-    public void testGetPrincipals() {
-        getPrincipals();
-    }
-
-    @Test
-    public void testRenamePrincipal() {
-        renamePrincipal();
-    }
-
-    @Test
-    public void testXDeletePrincipal() {
-        deletePrincipal();
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHasApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHasApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHasApi.java
deleted file mode 100644
index f2b6a4a..0000000
--- a/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHasApi.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.mysql;
-
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.has.server.TestRestApiBase;
-import org.junit.FixMethodOrder;
-import org.junit.Test;
-import org.junit.runners.MethodSorters;
-
-import java.io.File;
-
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-public class TestMySQLHasApi extends TestRestApiBase {
-
-    @Test
-    public void testKdcStart() {
-        kdcStart();
-        File backendDir = new File(testDir, "mysql-backend");
-        if (backendDir.exists()) {
-            FileUtil.fullyDelete(backendDir);
-        }
-    }
-
-    @Test
-    public void testKdcInit() {
-        kdcInit();
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/kerby/has/server/TestHasWebServer.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/kerby/has/server/TestHasWebServer.java b/has/has-server/src/test/java/org/apache/kerby/has/server/TestHasWebServer.java
new file mode 100644
index 0000000..a26c1e2
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/kerby/has/server/TestHasWebServer.java
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server;
+
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.util.URLConnectionFactory;
+import org.apache.kerby.has.server.web.WebConfigKey;
+import org.apache.kerby.has.server.web.WebServer;
+import org.apache.hadoop.http.HttpConfig.Policy;
+import org.apache.hadoop.net.NetUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.File;
+import java.net.InetSocketAddress;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.Arrays;
+import java.util.Collection;
+
+@RunWith(value = Parameterized.class)
+public class TestHasWebServer {
+  private static final String KEY_STORE_DIR = TestUtil.getTempPath("keystore");
+  private static File keyStoreDir = new File(KEY_STORE_DIR);
+  private static HasConfig httpsConf;
+  private static URLConnectionFactory connectionFactory;
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> policy() {
+    Object[][] params = new Object[][]{{Policy.HTTP_ONLY},
+        {Policy.HTTPS_ONLY}, {Policy.HTTP_AND_HTTPS}};
+    return Arrays.asList(params);
+  }
+
+  private final Policy policy;
+
+  public TestHasWebServer(Policy policy) {
+    super();
+    this.policy = policy;
+  }
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    httpsConf = new HasConfig();
+    // Create test keystore dir.
+    if (!keyStoreDir.exists()) {
+      if (!keyStoreDir.mkdirs()) {
+        System.err.println("Failed to create keystore-dir.");
+        System.exit(3);
+      }
+    }
+    String sslConfDir = TestUtil.getClasspathDir(TestRestApiBase.class);
+    TestUtil.setupSSLConfig(KEY_STORE_DIR, sslConfDir, httpsConf, false);
+    connectionFactory = URLConnectionFactory.newDefaultURLConnectionFactory(httpsConf);
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    FileUtil.fullyDelete(keyStoreDir);
+  }
+
+  @Test
+  public void testHttpPolicy() throws Exception {
+    httpsConf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY, policy.name());
+    httpsConf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY, "localhost:11236");
+    httpsConf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY, "localhost:19278");
+    httpsConf.setString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE, "simple");
+
+    WebServer server = null;
+    try {
+      server = new WebServer(httpsConf);
+      server.start();
+
+      Assert.assertTrue(implies(policy.isHttpEnabled(),
+          canAccess("http", server.getHttpAddress())));
+      Assert.assertTrue(implies(!policy.isHttpEnabled(),
+          server.getHttpAddress() == null));
+
+      Assert.assertTrue(implies(policy.isHttpsEnabled(),
+          canAccess("https", server.getHttpsAddress())));
+      Assert.assertTrue(implies(!policy.isHttpsEnabled(),
+          server.getHttpsAddress() == null));
+    } finally {
+      if (server != null) {
+        server.stop();
+      }
+    }
+  }
+
+  private static boolean canAccess(String scheme, InetSocketAddress address) {
+    if (address == null) {
+      return false;
+    }
+    try {
+      URL url = new URL(scheme + "://" + NetUtils.getHostPortString(address));
+      URLConnection conn = connectionFactory.openConnection(url);
+      conn.connect();
+      conn.getContent();
+    } catch (Exception e) {
+      return false;
+    }
+    return true;
+  }
+
+  private static boolean implies(boolean a, boolean b) {
+    return !a || b;
+  }
+}
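
Read together, the implies(...) assertions in testHttpPolicy reduce to the following expectations per policy (a summary of the existing checks, not additional behavior):

    HTTP_ONLY       -> HTTP endpoint reachable;  no HTTPS address bound
    HTTPS_ONLY      -> no HTTP address bound;    HTTPS endpoint reachable
    HTTP_AND_HTTPS  -> both endpoints reachable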

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/kerby/has/server/TestRestApiBase.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/kerby/has/server/TestRestApiBase.java b/has/has-server/src/test/java/org/apache/kerby/has/server/TestRestApiBase.java
new file mode 100644
index 0000000..21353c9
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/kerby/has/server/TestRestApiBase.java
@@ -0,0 +1,336 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.server;
+
+import com.sun.jersey.api.client.Client;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.api.client.config.ClientConfig;
+import com.sun.jersey.api.client.config.DefaultClientConfig;
+import com.sun.jersey.client.urlconnection.HTTPSProperties;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasConfigKey;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.spnego.AuthenticationException;
+import org.apache.kerby.has.common.util.URLConnectionFactory;
+import org.apache.kerby.has.server.web.WebConfigKey;
+import org.apache.kerby.has.server.web.WebServer;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.glassfish.jersey.SslConfigurator;
+import org.junit.After;
+import org.junit.Before;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLSession;
+import javax.ws.rs.core.MultivaluedMap;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.URL;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestRestApiBase {
+    private static String address;
+    protected static File testDir = new File(System.getProperty("test.dir", "target"));
+    private static File testClassDir = new File(testDir, "test-classes");
+    private static File confDir = new File(testClassDir, "conf");
+    private static File workDir = new File(testDir, "work-dir");
+    private static HasServer server = null;
+    private static final String KEY_STORE_DIR = TestUtil.getTempPath("keystore");
+    private static File keyStoreDir = new File(KEY_STORE_DIR);
+    private static HasConfig httpsConf;
+
+    @Before
+    public void startHasServer() throws Exception {
+        // Create test keystoreDir and workDir.
+        if (!keyStoreDir.exists()) {
+            if (!keyStoreDir.mkdirs()) {
+                System.err.println("Failed to create keystore-dir.");
+                System.exit(3);
+            }
+        }
+
+        if (!workDir.exists()) {
+            if (!workDir.mkdirs()) {
+                System.err.println("Failed to create work-dir.");
+                System.exit(3);
+            }
+        }
+
+        // Configure test HAS server.
+        httpsConf = new HasConfig();
+        String sslConfDir = TestUtil.getClasspathDir(TestRestApiBase.class);
+        TestUtil.setupSSLConfig(KEY_STORE_DIR, sslConfDir, httpsConf, false);
+        httpsConf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
+        httpsConf.setString(HasConfigKey.FILTER_AUTH_TYPE, "simple");
+
+        // Start test HAS server.
+        int httpsPort = 10000 + (int) (System.currentTimeMillis() % 10000); // Derive a test port in [10000, 19999] from the current time
+        String host = "localhost";
+        address = host + ":" + httpsPort;
+        httpsConf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY, address);
+
+        server = new HasServer(confDir);
+        server.setWebServer(new WebServer(httpsConf));
+        server.setWorkDir(workDir);
+        try {
+            server.startWebServer();
+        } catch (HasException e) {
+            System.err.println("Errors occurred when start HAS server: " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    @After
+    public void stopHasServer() {
+        server.stopWebServer();
+        if (keyStoreDir.exists()) {
+            FileUtil.fullyDelete(keyStoreDir);
+        }
+        if (workDir.exists()) {
+            FileUtil.fullyDelete(workDir);
+        }
+    }
+
+    private void startKdc() {
+        WebResource webResource = getWebResource("kdcstart");
+        String response = webResource.get(String.class);
+        try {
+            JSONObject result = new JSONObject(response);
+            if (!result.getString("result").equals("success")) {
+                System.err.println("Errors occurred when start HAS KDC server.");
+                System.exit(6);
+            }
+        } catch (JSONException e) {
+            System.err.println("Errors occurred when start HAS KDC server. " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    protected WebResource getWebResource(String restName) {
+        String apiUrl = "https://" + address + "/has/v1/" + restName;
+        HasConfig clientConf = new HasConfig();
+        try {
+            clientConf.addIniConfig(new File(httpsConf.getString(SSLFactory.SSL_CLIENT_CONF_KEY)));
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        SslConfigurator sslConfigurator = SslConfigurator.newInstance()
+            .trustStoreFile(clientConf.getString("ssl.client.truststore.location"))
+            .trustStorePassword(clientConf.getString("ssl.client.truststore.password"));
+        sslConfigurator.securityProtocol("SSL");
+        SSLContext sslContext = sslConfigurator.createSSLContext();
+        ClientConfig clientConfig = new DefaultClientConfig();
+        clientConfig.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES,
+            new HTTPSProperties(new HostnameVerifier() {
+                @Override
+                public boolean verify(String s, SSLSession sslSession) {
+                    return false;
+                }
+            }, sslContext));
+        Client client = Client.create(clientConfig);
+        return client.resource(apiUrl);
+    }
+
+    protected void getKrb5Conf() {
+        WebResource webResource = getWebResource("getkrb5conf");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+    }
+
+    protected void getHasConf() {
+        WebResource webResource = getWebResource("gethasconf");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+        File hasConf = new File(confDir, "has-client.conf");
+        if (hasConf.exists()) {
+            if (!hasConf.delete()) {
+                System.err.println("Failed to delete has-client.conf.");
+            }
+        }
+    }
+
+    protected void kdcStart() {
+        WebResource webResource = getWebResource("kdcstart");
+        String response = webResource.get(String.class);
+        try {
+            JSONObject result = new JSONObject(response);
+            assertEquals("success", result.getString("result"));
+        } catch (JSONException e) {
+            System.err.println("Failed to start HAS KDC server. " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    protected void kdcInit() {
+        startKdc();
+        WebResource webResource = getWebResource("kdcinit");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+    }
+
+    protected void createPrincipals() {
+        String webServerUrl = "https://" + address + "/has/v1/";
+        startKdc();
+
+        // Create test host roles json object.
+        JSONObject hostRoles = new JSONObject();
+        try {
+            JSONObject host1 = new JSONObject();
+            host1.put("name", "host1");
+            host1.put("hostRoles", "HDFS,YARN");
+            JSONObject host2 = new JSONObject();
+            host2.put("name", "host2");
+            host2.put("hostRoles", "ZOOKEEPER,HBASE");
+            JSONArray hosts = new JSONArray();
+            hosts.put(host1);
+            hosts.put(host2);
+            hostRoles.put("HOSTS", hosts);
+        } catch (JSONException e) {
+            System.err.println("Failed to create test host roles json object. " + e.toString());
+            System.exit(6);
+        }
+
+        try {
+            URL url = null;
+            try {
+                url = new URL(webServerUrl + "admin/createprincipals");
+            } catch (MalformedURLException e) {
+                e.printStackTrace();
+            }
+
+            URLConnectionFactory connectionFactory = URLConnectionFactory.newDefaultURLConnectionFactory(httpsConf);
+            HttpURLConnection httpConn = (HttpURLConnection) connectionFactory.openConnection(url, false, httpsConf);
+            httpConn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
+            httpConn.setRequestMethod("PUT");
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            OutputStream out = httpConn.getOutputStream();
+            out.write(hostRoles.toString().getBytes());
+            out.flush();
+            out.close();
+
+            assertEquals(200, httpConn.getResponseCode());
+            BufferedReader reader = httpConn.getResponseCode()
+                == HttpURLConnection.HTTP_OK ? new BufferedReader(
+                new InputStreamReader(httpConn.getInputStream(),
+                    "UTF-8")) : new BufferedReader(
+                new InputStreamReader(httpConn.getErrorStream(),
+                    "UTF-8"));
+
+            String response = reader.readLine();
+            JSONObject result = new JSONObject(response);
+            assertEquals("success", result.getString("result"));
+        } catch (JSONException | IOException | AuthenticationException e) {
+            System.err.println("Failed to create principals by hostRoles. " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    protected void exportKeytabs() {
+        startKdc();
+        WebResource webResource = getWebResource("admin/exportkeytabs");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("host", "host1");
+        params.add("role", "HDFS");
+        ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+    }
+
+    protected void exportKeytab() {
+        startKdc();
+        WebResource webResource = getWebResource("admin/exportkeytab");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", "admin@HADOOP.COM");
+        ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+    }
+
+    protected void addPrincipal() {
+        startKdc();
+        WebResource webResource = getWebResource("admin/addprincipal");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", "admin");
+        params.add("password", "123");
+        String response = webResource.queryParams(params).post(String.class);
+        try {
+            JSONObject result = new JSONObject(response);
+            assertEquals("success", result.getString("result"));
+        } catch (JSONException e) {
+            System.err.println("Failed to add principal. " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    protected void getPrincipals() {
+        startKdc();
+        WebResource webResource = getWebResource("admin/getprincipals");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        String response = webResource.queryParams(params).get(String.class);
+        try {
+            JSONObject result = new JSONObject(response);
+            assertEquals("success", result.getString("result"));
+        } catch (JSONException e) {
+            System.err.println("Failed to get principals. " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    protected void renamePrincipal() {
+        startKdc();
+        WebResource webResource = getWebResource("admin/renameprincipal");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("oldprincipal", "admin");
+        params.add("newprincipal", "admin2");
+        String response = webResource.queryParams(params).post(String.class);
+        try {
+            JSONObject result = new JSONObject(response);
+            assertEquals("success", result.getString("result"));
+        } catch (JSONException e) {
+            System.err.println("Failed to rename principal. " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    protected void deletePrincipal() {
+        startKdc();
+        WebResource webResource = getWebResource("admin/deleteprincipal");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", "admin2");
+        String response = webResource.queryParams(params).delete(String.class);
+        try {
+            JSONObject result = new JSONObject(response);
+            assertEquals("success", result.getString("result"));
+        } catch (JSONException e) {
+            System.err.println("Failed to delete principal. " + e.toString());
+            System.exit(6);
+        }
+    }
+}
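
TestRestApiBase declares no @Test methods of its own; concrete suites subclass it and drive the protected helpers above. A minimal sketch of such a subclass, with a hypothetical class name, mirroring the JSON and MySQL suites removed earlier in this patch:

    import org.junit.FixMethodOrder;
    import org.junit.Test;
    import org.junit.runners.MethodSorters;

    @FixMethodOrder(MethodSorters.NAME_ASCENDING)
    public class TestExampleHasApi extends TestRestApiBase { // hypothetical name

        @Test
        public void testKdcStart() {
            kdcStart();    // GET /has/v1/kdcstart, asserts {"result": "success"}
        }

        @Test
        public void testGetKrb5Conf() {
            getKrb5Conf(); // GET /has/v1/getkrb5conf, asserts HTTP 200
        }
    }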

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/kerby/has/server/TestUtil.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/kerby/has/server/TestUtil.java b/has/has-server/src/test/java/org/apache/kerby/has/server/TestUtil.java
new file mode 100644
index 0000000..bcc0536
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/kerby/has/server/TestUtil.java
@@ -0,0 +1,368 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server;
+
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.server.web.WebConfigKey;
+import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.bouncycastle.x509.X509V1CertificateGenerator;
+
+import javax.security.auth.x500.X500Principal;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.math.BigInteger;
+import java.net.URL;
+import java.security.GeneralSecurityException;
+import java.security.InvalidKeyException;
+import java.security.Key;
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.KeyStore;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
+import java.security.SecureRandom;
+import java.security.SignatureException;
+import java.security.cert.Certificate;
+import java.security.cert.CertificateEncodingException;
+import java.security.cert.X509Certificate;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+class TestUtil {
+
+  /**
+   * system property for test data: {@value}
+   */
+  private static final String SYSPROP_TEST_DATA_DIR = "test.build.data";
+
+  /**
+   * The default path to use in Hadoop path references: {@value}
+   */
+  private static final String DEFAULT_TEST_DATA_PATH = "target/";
+
+  /**
+   * Get a temp path. This may or may not be relative; it depends on what the
+   * {@link #SYSPROP_TEST_DATA_DIR} is set to. If unset, it returns a path
+   * under the relative path {@link #DEFAULT_TEST_DATA_PATH}
+   *
+   * @param subPath sub path, with no leading "/" character
+   * @return a string to use in paths
+   */
+  public static String getTempPath(String subPath) {
+    String prop = System.getProperty(SYSPROP_TEST_DATA_DIR, DEFAULT_TEST_DATA_PATH);
+    if (prop.isEmpty()) {
+      // corner case: property is there but empty
+      prop = DEFAULT_TEST_DATA_PATH;
+    }
+    if (!prop.endsWith("/")) {
+      prop = prop + "/";
+    }
+    return prop + subPath;
+  }
+
+  public static String getClasspathDir(Class testClass) throws Exception {
+    String file = testClass.getName();
+    file = file.replace('.', '/') + ".class";
+    URL url = Thread.currentThread().getContextClassLoader().getResource(file);
+    String baseDir = url.toURI().getPath();
+    baseDir = baseDir.substring(0, baseDir.length() - file.length() - 1);
+    return baseDir;
+  }
+
+  @SuppressWarnings("deprecation")
+  /*
+   * Create a self-signed X.509 Certificate.
+   *
+   * @param dn the X.509 Distinguished Name, eg "CN=Test, L=London, C=GB"
+   * @param pair the KeyPair
+   * @param days how many days from now the Certificate is valid for
+   * @param algorithm the signing algorithm, eg "SHA1withRSA"
+   * @return the self-signed certificate
+   */
+  private static X509Certificate generateCertificate(String dn, KeyPair pair, int days, String algorithm)
+      throws CertificateEncodingException, InvalidKeyException, IllegalStateException,
+      NoSuchProviderException, NoSuchAlgorithmException, SignatureException {
+
+    Date from = new Date();
+    Date to = new Date(from.getTime() + days * 86400000L);
+    BigInteger sn = new BigInteger(64, new SecureRandom());
+    X509V1CertificateGenerator certGen = new X509V1CertificateGenerator();
+    X500Principal dnName = new X500Principal(dn);
+
+    certGen.setSerialNumber(sn);
+    certGen.setIssuerDN(dnName);
+    certGen.setNotBefore(from);
+    certGen.setNotAfter(to);
+    certGen.setSubjectDN(dnName);
+    certGen.setPublicKey(pair.getPublic());
+    certGen.setSignatureAlgorithm(algorithm);
+
+    return certGen.generate(pair.getPrivate());
+  }
+
+  private static KeyPair generateKeyPair(String algorithm) throws NoSuchAlgorithmException {
+    KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm);
+    keyGen.initialize(1024);
+    return keyGen.genKeyPair();
+  }
+
+  private static KeyStore createEmptyKeyStore() throws GeneralSecurityException, IOException {
+    KeyStore ks = KeyStore.getInstance("JKS");
+    ks.load(null, null); // initialize
+    return ks;
+  }
+
+  private static void saveKeyStore(KeyStore ks, String filename, String password)
+      throws GeneralSecurityException, IOException {
+    FileOutputStream out = new FileOutputStream(filename);
+    ks.store(out, password.toCharArray());
+    out.close();
+  }
+
+  private static void createKeyStore(String filename, String password, String alias, Key privateKey, Certificate cert)
+      throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    ks.setKeyEntry(alias, privateKey, password.toCharArray(), new Certificate[]{cert});
+    saveKeyStore(ks, filename, password);
+  }
+
+  private static <T extends Certificate> void createTrustStore(String filename, String password, Map<String, T> certs)
+      throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    for (Map.Entry<String, T> cert : certs.entrySet()) {
+      ks.setCertificateEntry(cert.getKey(), cert.getValue());
+    }
+    saveKeyStore(ks, filename, password);
+  }
+
+  /**
+   * Performs complete setup of SSL configuration in preparation for testing an
+   * SSLFactory.  This includes keys, certs, keystore, truststore, the server
+   * SSL configuration file, the client SSL configuration file, and the master
+   * configuration file read by the SSLFactory.
+   *
+   * @param keystoreDir   String directory to save keystore
+   * @param sslConfDir    String directory to save SSL configuration files
+   * @param conf          Configuration master configuration to be used by an SSLFactory,
+   *                      which will be mutated by this method
+   * @param useClientCert boolean true to make the client present a cert in the SSL handshake
+   */
+  public static void setupSSLConfig(String keystoreDir, String sslConfDir, HasConfig conf, boolean useClientCert)
+      throws Exception {
+    setupSSLConfig(keystoreDir, sslConfDir, conf, useClientCert, true, "");
+  }
+
+  /**
+   * Performs complete setup of SSL configuration in preparation for testing an
+   * SSLFactory.  This includes keys, certs, keystore, truststore, the server
+   * SSL configuration file, the client SSL configuration file, and the master
+   * configuration file read by the SSLFactory.
+   *
+   * @param keystoreDir   String directory to save keystore
+   * @param sslConfDir    String directory to save SSL configuration files
+   * @param conf          Configuration master configuration to be used by an SSLFactory,
+   *                      which will be mutated by this method
+   * @param useClientCert boolean true to make the client present a cert in the SSL handshake
+   * @param trustStore    boolean true to create truststore, false not to create it
+   * @param excludeCiphers String comma separated ciphers to exclude
+   * @throws Exception e
+   */
+  private static void setupSSLConfig(String keystoreDir, String sslConfDir, HasConfig conf, boolean useClientCert,
+                                     boolean trustStore, String excludeCiphers) throws Exception {
+    String clientKS = keystoreDir + "/clientKS.jks";
+    String clientPassword = "clientP";
+    String serverKS = keystoreDir + "/serverKS.jks";
+    String serverPassword = "serverP";
+    String trustKS = null;
+    String trustPassword = "trustP";
+
+    File sslClientConfFile = new File(sslConfDir, getClientSSLConfigFileName());
+    File sslServerConfFile = new File(sslConfDir, getServerSSLConfigFileName());
+
+    Map<String, X509Certificate> certs = new HashMap<String, X509Certificate>();
+
+    if (useClientCert) {
+      KeyPair cKP = TestUtil.generateKeyPair("RSA");
+      X509Certificate cCert = TestUtil.generateCertificate("CN=localhost, O=client", cKP, 30, "SHA1withRSA");
+      TestUtil.createKeyStore(clientKS, clientPassword, "client", cKP.getPrivate(), cCert);
+      certs.put("client", cCert);
+    }
+
+    KeyPair sKP = TestUtil.generateKeyPair("RSA");
+    X509Certificate sCert = TestUtil.generateCertificate("CN=localhost, O=server", sKP, 30, "SHA1withRSA");
+    TestUtil.createKeyStore(serverKS, serverPassword, "server", sKP.getPrivate(), sCert);
+    certs.put("server", sCert);
+
+    if (trustStore) {
+      trustKS = keystoreDir + "/trustKS.jks";
+      TestUtil.createTrustStore(trustKS, trustPassword, certs);
+    }
+
+    HasConfig clientSSLConf = createClientSSLConfig(clientKS, clientPassword, clientPassword, trustKS, excludeCiphers);
+    HasConfig serverSSLConf = createServerSSLConfig(serverKS, serverPassword, serverPassword, trustKS, excludeCiphers);
+
+    saveConfig(sslClientConfFile, clientSSLConf);
+    saveConfig(sslServerConfFile, serverSSLConf);
+
+    conf.setString(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
+    conf.setString(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConfFile.getCanonicalPath());
+    conf.setString(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile.getCanonicalPath());
+    conf.setString(WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, sslServerConfFile.getAbsolutePath());
+    conf.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, useClientCert);
+  }
+
+  /**
+   * Creates SSL configuration for a client.
+   *
+   * @param clientKS       String client keystore file
+   * @param password       String store password, or null to avoid setting store password
+   * @param keyPassword    String key password, or null to avoid setting key password
+   * @param trustKS        String truststore file
+   * @param excludeCiphers String comma separated ciphers to exclude
+   * @return Configuration for client SSL
+   */
+  private static HasConfig createClientSSLConfig(String clientKS, String password, String keyPassword,
+                                                 String trustKS, String excludeCiphers) {
+    return createSSLConfig(SSLFactory.Mode.CLIENT, clientKS, password, keyPassword, trustKS, excludeCiphers);
+  }
+
+  /**
+   * Creates SSL configuration for a server.
+   *
+   * @param serverKS       String server keystore file
+   * @param password       String store password, or null to avoid setting store password
+   * @param keyPassword    String key password, or null to avoid setting key password
+   * @param trustKS        String truststore file
+   * @param excludeCiphers String comma separated ciphers to exclude
+   * @return HasConfig
+   * @throws IOException e
+   */
+  private static HasConfig createServerSSLConfig(String serverKS, String password, String keyPassword,
+                                                 String trustKS, String excludeCiphers) throws IOException {
+    return createSSLConfig(SSLFactory.Mode.SERVER, serverKS, password, keyPassword, trustKS, excludeCiphers);
+  }
+
+  /**
+   * Returns the client SSL configuration file name.  Under parallel test
+   * execution, this file name is parametrized by a unique ID to ensure that
+   * concurrent tests don't collide on an SSL configuration file.
+   *
+   * @return client SSL configuration file name
+   */
+  private static String getClientSSLConfigFileName() {
+    return getSSLConfigFileName("ssl-client");
+  }
+
+  /**
+   * Returns the server SSL configuration file name.  Under parallel test
+   * execution, this file name is parametrized by a unique ID to ensure that
+   * concurrent tests don't collide on an SSL configuration file.
+   *
+   * @return server SSL configuration file name
+   */
+  private static String getServerSSLConfigFileName() {
+    return getSSLConfigFileName("ssl-server");
+  }
+
+  /**
+   * Returns an SSL configuration file name.  Under parallel test
+   * execution, this file name is parametrized by a unique ID to ensure that
+   * concurrent tests don't collide on an SSL configuration file.
+   *
+   * @param base the base of the file name
+   * @return SSL configuration file name for base
+   */
+  private static String getSSLConfigFileName(String base) {
+    String testUniqueForkId = System.getProperty("test.unique.fork.id");
+    String fileSuffix = testUniqueForkId != null ? "-" + testUniqueForkId : "";
+    return base + fileSuffix + ".xml";
+  }
+
+  /**
+   * Creates SSL configuration.
+   *
+   * @param mode        SSLFactory.Mode mode to configure
+   * @param keystore    String keystore file
+   * @param password    String store password, or null to avoid setting store password
+   * @param keyPassword String key password, or null to avoid setting key password
+   * @param trustKS     String truststore file
+   * @param excludeCiphers String comma separated ciphers to exclude
+   * @return Configuration for SSL
+   */
+  private static HasConfig createSSLConfig(SSLFactory.Mode mode, String keystore, String password,
+                                           String keyPassword, String trustKS, String excludeCiphers) {
+    String trustPassword = "trustP";
+
+    HasConfig sslConf = new HasConfig();
+    if (keystore != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_KEYSTORE_LOCATION_TPL_KEY), keystore);
+    }
+    if (password != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_KEYSTORE_PASSWORD_TPL_KEY), password);
+    }
+    if (keyPassword != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_KEYSTORE_KEYPASSWORD_TPL_KEY),
+          keyPassword);
+    }
+    if (trustKS != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_TRUSTSTORE_LOCATION_TPL_KEY), trustKS);
+    }
+    if (trustPassword != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_TRUSTSTORE_PASSWORD_TPL_KEY),
+          trustPassword);
+    }
+    if (null != excludeCiphers && !excludeCiphers.isEmpty()) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_EXCLUDE_CIPHER_LIST),
+          excludeCiphers);
+    }
+    sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY), "1000");
+
+    return sslConf;
+  }
+
+  /**
+   * Saves configuration to a file.
+   *
+   * @param file File to write the configuration to
+   * @param conf Configuration contents to write to file
+   * @throws IOException if there is an I/O error saving the file
+   */
+  private static void saveConfig(File file, HasConfig conf) throws IOException {
+    OutputStream output = new FileOutputStream(file);
+    try {
+      Properties prop = new Properties();
+
+      // copy the configuration entries into the properties object
+      for (String name : conf.getNames()) {
+        prop.setProperty(name, conf.getString(name));
+      }
+
+      // write the properties to the given file
+      prop.store(output, null);
+    } finally {
+      output.close();
+    }
+  }
+}
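
A hedged usage sketch: a test sitting in the same class as the helpers above
could wire setupSSLConfig into an SSLFactory round trip roughly as follows.
The temporary directory, the method name, and the smoke-test shape are
illustrative assumptions, not code from this commit.

    public static void sslFactorySmokeTest() throws Exception {
      // Illustrative temp location for the keystores and ssl-* config files.
      String baseDir = System.getProperty("java.io.tmpdir") + "/has-ssl-smoke";
      new java.io.File(baseDir).mkdirs();

      HasConfig conf = new HasConfig();
      // Generates keys/certs, writes the ssl-client/ssl-server files and
      // points conf at them (see setupSSLConfig above).
      setupSSLConfig(baseDir, baseDir, conf, false);

      SSLFactory serverFactory = new SSLFactory(SSLFactory.Mode.SERVER, conf);
      serverFactory.init();
      try {
        // Backed by the serverKS.jks created above.
        javax.net.ssl.SSLServerSocketFactory ssf =
            serverFactory.createSSLServerSocketFactory();
        System.out.println("Server socket factory ready: " + (ssf != null));
      } finally {
        serverFactory.destroy();
      }
    }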


[10/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/KeyStoresFactory.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/KeyStoresFactory.java b/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/KeyStoresFactory.java
new file mode 100644
index 0000000..a7ae53a
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/KeyStoresFactory.java
@@ -0,0 +1,254 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.kerby.has.common.ssl;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.util.StringUtils;
+import org.apache.kerby.kerberos.kerb.client.KrbConfig;
+
+import javax.net.ssl.KeyManager;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.TrustManager;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.GeneralSecurityException;
+import java.security.KeyStore;
+import java.text.MessageFormat;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ * Interface that gives access to {@link KeyManager} and {@link TrustManager}
+ * implementations.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class KeyStoresFactory extends KrbConfig {
+
+  private static final Log LOG =
+    LogFactory.getLog(KeyStoresFactory.class);
+
+  public static final String SSL_KEYSTORE_LOCATION_TPL_KEY =
+    "ssl.{0}.keystore.location";
+  public static final String SSL_KEYSTORE_PASSWORD_TPL_KEY =
+    "ssl.{0}.keystore.password";
+  public static final String SSL_KEYSTORE_KEYPASSWORD_TPL_KEY =
+    "ssl.{0}.keystore.keypassword";
+  public static final String SSL_KEYSTORE_TYPE_TPL_KEY =
+    "ssl.{0}.keystore.type";
+
+  public static final String SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY =
+    "ssl.{0}.truststore.reload.interval";
+  public static final String SSL_TRUSTSTORE_LOCATION_TPL_KEY =
+    "ssl.{0}.truststore.location";
+  public static final String SSL_TRUSTSTORE_PASSWORD_TPL_KEY =
+    "ssl.{0}.truststore.password";
+  public static final String SSL_TRUSTSTORE_TYPE_TPL_KEY =
+    "ssl.{0}.truststore.type";
+
+  /**
+   * Default format of the keystore files.
+   */
+  public static final String DEFAULT_KEYSTORE_TYPE = "jks";
+
+  /**
+   * Reload interval in milliseconds.
+   */
+  public static final long DEFAULT_SSL_TRUSTSTORE_RELOAD_INTERVAL = 10000;
+
+  private HasConfig conf;
+  private KeyManager[] keyManagers;
+  private TrustManager[] trustManagers;
+  private ReloadingX509TrustManager trustManager;
+
+  /**
+   * Sets the configuration for the factory.
+   *
+   * @param conf the configuration for the factory.
+   */
+  public void setConf(HasConfig conf) {
+    this.conf = conf;
+  }
+
+  /**
+   * Returns the configuration of the factory.
+   *
+   * @return the configuration of the factory.
+   */
+  public HasConfig getConf() {
+    return conf;
+  }
+
+
+  /**
+   * Initializes the keystores of the factory.
+   *
+   * @param mode if the keystores are to be used in client or server mode.
+   * @throws IOException thrown if the keystores could not be initialized due
+   * to an IO error.
+   * @throws GeneralSecurityException thrown if the keystores could not be
+   * initialized due to a security error.
+   */
+  public void init(SSLFactory.Mode mode) throws IOException, GeneralSecurityException {
+    boolean requireClientCert =
+      conf.getBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY,
+          SSLFactory.DEFAULT_SSL_REQUIRE_CLIENT_CERT);
+
+    // certificate store
+    String keystoreType =
+      conf.getString(resolvePropertyName(mode, SSL_KEYSTORE_TYPE_TPL_KEY),
+               DEFAULT_KEYSTORE_TYPE);
+    KeyStore keystore = KeyStore.getInstance(keystoreType);
+    String keystoreKeyPassword = null;
+    if (requireClientCert || mode == SSLFactory.Mode.SERVER) {
+      String locationProperty =
+        resolvePropertyName(mode, SSL_KEYSTORE_LOCATION_TPL_KEY);
+      String keystoreLocation = conf.getString(locationProperty, "");
+      if (keystoreLocation.isEmpty()) {
+        throw new GeneralSecurityException("The property '" + locationProperty
+            + "' has not been set in the ssl configuration file.");
+      }
+      String passwordProperty =
+        resolvePropertyName(mode, SSL_KEYSTORE_PASSWORD_TPL_KEY);
+      String keystorePassword = getPassword(conf, passwordProperty, "");
+      if (keystorePassword.isEmpty()) {
+        throw new GeneralSecurityException("The property '" + passwordProperty
+            + "' has not been set in the ssl configuration file.");
+      }
+      String keyPasswordProperty =
+        resolvePropertyName(mode, SSL_KEYSTORE_KEYPASSWORD_TPL_KEY);
+      // Key password defaults to the same value as store password for
+      // compatibility with legacy configurations that did not use a separate
+      // configuration property for key password.
+      keystoreKeyPassword = getPassword(
+          conf, keyPasswordProperty, keystorePassword);
+      LOG.debug(mode.toString() + " KeyStore: " + keystoreLocation);
+
+      InputStream is = new FileInputStream(keystoreLocation);
+      try {
+        keystore.load(is, keystorePassword.toCharArray());
+      } finally {
+        is.close();
+      }
+      LOG.debug(mode.toString() + " Loaded KeyStore: " + keystoreLocation);
+    } else {
+      keystore.load(null, null);
+    }
+    KeyManagerFactory keyMgrFactory = KeyManagerFactory
+        .getInstance(SSLFactory.SSLCERTIFICATE);
+
+    keyMgrFactory.init(keystore, (keystoreKeyPassword != null)
+        ? keystoreKeyPassword.toCharArray() : null);
+    keyManagers = keyMgrFactory.getKeyManagers();
+
+    //trust store
+    String truststoreType =
+      conf.getString(resolvePropertyName(mode, SSL_TRUSTSTORE_TYPE_TPL_KEY),
+               DEFAULT_KEYSTORE_TYPE);
+
+    String locationProperty =
+      resolvePropertyName(mode, SSL_TRUSTSTORE_LOCATION_TPL_KEY);
+    String truststoreLocation = conf.getString(locationProperty, "");
+    if (!truststoreLocation.isEmpty()) {
+      String passwordProperty = resolvePropertyName(mode,
+          SSL_TRUSTSTORE_PASSWORD_TPL_KEY);
+      String truststorePassword = getPassword(conf, passwordProperty, "");
+      if (truststorePassword.isEmpty()) {
+        throw new GeneralSecurityException("The property '" + passwordProperty
+            + "' has not been set in the ssl configuration file.");
+      }
+      long truststoreReloadInterval =
+          conf.getLong(resolvePropertyName(mode, SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY),
+              DEFAULT_SSL_TRUSTSTORE_RELOAD_INTERVAL);
+
+      LOG.debug(mode.toString() + " TrustStore: " + truststoreLocation);
+
+      trustManager = new ReloadingX509TrustManager(truststoreType,
+          truststoreLocation,
+          truststorePassword,
+          truststoreReloadInterval);
+      trustManager.init();
+      LOG.debug(mode.toString() + " Loaded TrustStore: " + truststoreLocation);
+      trustManagers = new TrustManager[]{trustManager};
+    } else {
+      LOG.debug("The property '" + locationProperty + "' has not been set, "
+          + "no TrustStore will be loaded");
+      trustManagers = null;
+    }
+  }
+
+  String getPassword(HasConfig conf, String alias, String defaultPass) {
+    String password = conf.getString(alias);
+    // Fall back to the default when the property is not configured.
+    return password != null ? password : defaultPass;
+  }
+
+  /**
+   * Releases any resources being used.
+   */
+  public void destroy() {
+    if (trustManager != null) {
+      trustManager.destroy();
+      trustManager = null;
+      keyManagers = null;
+      trustManagers = null;
+    }
+  }
+
+  /**
+   * Returns the keymanagers for owned certificates.
+   *
+   * @return the keymanagers for owned certificates.
+   */
+  public KeyManager[] getKeyManagers() {
+    return keyManagers;
+  }
+
+  /**
+   * Returns the trustmanagers for trusted certificates.
+   *
+   * @return the trustmanagers for trusted certificates.
+   */
+  public TrustManager[] getTrustManagers() {
+    return trustManagers;
+  }
+
+  /**
+   * Resolves a property name to its client/server version if applicable.
+   * <p/>
+   * NOTE: This method is public for testing purposes.
+   *
+   * @param mode client/server mode.
+   * @param template property name template.
+   * @return the resolved property name.
+   */
+  @VisibleForTesting
+  public static String resolvePropertyName(SSLFactory.Mode mode,
+                                           String template) {
+    return MessageFormat.format(
+        template, StringUtils.toLowerCase(mode.toString()));
+  }
+}
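
A hedged illustration of how the template keys above resolve per mode; the
values follow directly from MessageFormat and the constants defined in
KeyStoresFactory, and the class/main wrapper exists only for the example.

    import org.apache.kerby.has.common.ssl.KeyStoresFactory;
    import org.apache.kerby.has.common.ssl.SSLFactory;

    public class ResolveExample {
      public static void main(String[] args) {
        // "ssl.{0}.keystore.location" -> "ssl.server.keystore.location"
        String serverKey = KeyStoresFactory.resolvePropertyName(
            SSLFactory.Mode.SERVER, KeyStoresFactory.SSL_KEYSTORE_LOCATION_TPL_KEY);

        // "ssl.{0}.truststore.password" -> "ssl.client.truststore.password"
        String clientKey = KeyStoresFactory.resolvePropertyName(
            SSLFactory.Mode.CLIENT, KeyStoresFactory.SSL_TRUSTSTORE_PASSWORD_TPL_KEY);

        System.out.println(serverKey + " / " + clientKey);
      }
    }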

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/ReloadingX509TrustManager.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/ReloadingX509TrustManager.java b/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/ReloadingX509TrustManager.java
new file mode 100644
index 0000000..29ed038
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/ReloadingX509TrustManager.java
@@ -0,0 +1,208 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kerby.has.common.ssl;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import javax.net.ssl.TrustManager;
+import javax.net.ssl.TrustManagerFactory;
+import javax.net.ssl.X509TrustManager;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.security.GeneralSecurityException;
+import java.security.KeyStore;
+import java.security.cert.CertificateException;
+import java.security.cert.X509Certificate;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ * A {@link TrustManager} implementation that reloads its configuration when
+ * the truststore file on disk changes.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public final class ReloadingX509TrustManager
+  implements X509TrustManager, Runnable {
+
+  private static final Log LOG =
+    LogFactory.getLog(ReloadingX509TrustManager.class);
+
+  private String type;
+  private File file;
+  private String password;
+  private long lastLoaded;
+  private long reloadInterval;
+  private AtomicReference<X509TrustManager> trustManagerRef;
+
+  private volatile boolean running;
+  private Thread reloader;
+
+  /**
+   * Creates a reloadable trustmanager. The trustmanager reloads itself
+   * if the underlying truststore file has changed.
+   *
+   * @param type type of truststore file, typically 'jks'.
+   * @param location local path to the truststore file.
+   * @param password password of the truststore file.
+   * @param reloadInterval interval to check if the truststore file has
+   * changed, in milliseconds.
+   * @throws IOException thrown if the truststore could not be initialized due
+   * to an IO error.
+   * @throws GeneralSecurityException thrown if the truststore could not be
+   * initialized due to a security error.
+   */
+  public ReloadingX509TrustManager(String type, String location,
+                                   String password, long reloadInterval)
+    throws IOException, GeneralSecurityException {
+    this.type = type;
+    file = new File(location);
+    this.password = password;
+    trustManagerRef = new AtomicReference<X509TrustManager>();
+    trustManagerRef.set(loadTrustManager());
+    this.reloadInterval = reloadInterval;
+  }
+
+  /**
+   * Starts the reloader thread.
+   */
+  public void init() {
+    reloader = new Thread(this, "Truststore reloader thread");
+    reloader.setDaemon(true);
+    running = true;
+    reloader.start();
+  }
+
+  /**
+   * Stops the reloader thread.
+   */
+  public void destroy() {
+    running = false;
+    reloader.interrupt();
+  }
+
+  /**
+   * Returns the reload check interval.
+   *
+   * @return the reload check interval, in milliseconds.
+   */
+  public long getReloadInterval() {
+    return reloadInterval;
+  }
+
+  @Override
+  public void checkClientTrusted(X509Certificate[] chain, String authType)
+    throws CertificateException {
+    X509TrustManager tm = trustManagerRef.get();
+    if (tm != null) {
+      tm.checkClientTrusted(chain, authType);
+    } else {
+      throw new CertificateException("Unknown client chain certificate: "
+          + chain[0].toString());
+    }
+  }
+
+  @Override
+  public void checkServerTrusted(X509Certificate[] chain, String authType)
+    throws CertificateException {
+    X509TrustManager tm = trustManagerRef.get();
+    if (tm != null) {
+      tm.checkServerTrusted(chain, authType);
+    } else {
+      throw new CertificateException("Unknown server chain certificate: "
+          + chain[0].toString());
+    }
+  }
+
+  private static final X509Certificate[] EMPTY = new X509Certificate[0];
+  @Override
+  public X509Certificate[] getAcceptedIssuers() {
+    X509Certificate[] issuers = EMPTY;
+    X509TrustManager tm = trustManagerRef.get();
+    if (tm != null) {
+      issuers = tm.getAcceptedIssuers();
+    }
+    return issuers;
+  }
+
+  boolean needsReload() {
+    boolean reload = true;
+    if (file.exists()) {
+      if (file.lastModified() == lastLoaded) {
+        reload = false;
+      }
+    } else {
+      lastLoaded = 0;
+    }
+    return reload;
+  }
+
+  X509TrustManager loadTrustManager()
+  throws IOException, GeneralSecurityException {
+    X509TrustManager trustManager = null;
+    KeyStore ks = KeyStore.getInstance(type);
+    lastLoaded = file.lastModified();
+    FileInputStream in = new FileInputStream(file);
+    try {
+      ks.load(in, password.toCharArray());
+      LOG.debug("Loaded truststore '" + file + "'");
+    } finally {
+      in.close();
+    }
+
+    TrustManagerFactory trustManagerFactory = 
+      TrustManagerFactory.getInstance(SSLFactory.SSLCERTIFICATE);
+    trustManagerFactory.init(ks);
+    TrustManager[] trustManagers = trustManagerFactory.getTrustManagers();
+    for (TrustManager trustManager1 : trustManagers) {
+      if (trustManager1 instanceof X509TrustManager) {
+        trustManager = (X509TrustManager) trustManager1;
+        break;
+      }
+    }
+    return trustManager;
+  }
+
+  @Override
+  public void run() {
+    while (running) {
+      try {
+        Thread.sleep(reloadInterval);
+      } catch (InterruptedException e) {
+        //NOP
+      }
+      if (running && needsReload()) {
+        try {
+          trustManagerRef.set(loadTrustManager());
+        } catch (Exception ex) {
+          LOG.warn("Could not load truststore (keep using existing one) : "
+              + ex.toString(), ex);
+        }
+      }
+    }
+  }
+
+}
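
As a hedged sketch, the reloading trust manager can back an SSLContext on its
own; the truststore path, type, password and interval below are placeholders,
and the file must already exist for the constructor to load it.

    import org.apache.kerby.has.common.ssl.ReloadingX509TrustManager;

    import javax.net.ssl.SSLContext;
    import javax.net.ssl.TrustManager;

    public class ReloadingTrustExample {
      public static void main(String[] args) throws Exception {
        // Placeholder truststore details; reload check every 10 seconds.
        ReloadingX509TrustManager tm =
            new ReloadingX509TrustManager("jks", "/tmp/trustKS.jks", "trustP", 10000);
        tm.init();  // starts the daemon reloader thread
        try {
          SSLContext ctx = SSLContext.getInstance("TLS");
          ctx.init(null, new TrustManager[]{tm}, null);
          // ctx can now be used for connections; certificates later added to
          // /tmp/trustKS.jks are picked up without restarting the process.
        } finally {
          tm.destroy();  // stops the reloader thread
        }
      }
    }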

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/SSLFactory.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/SSLFactory.java b/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/SSLFactory.java
new file mode 100644
index 0000000..c16d0f4
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/SSLFactory.java
@@ -0,0 +1,290 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.kerby.has.common.ssl;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.util.ConnectionConfigurator;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.util.StringUtils;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLEngine;
+import javax.net.ssl.SSLServerSocketFactory;
+import javax.net.ssl.SSLSocketFactory;
+import java.io.File;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.security.GeneralSecurityException;
+
+import static org.apache.kerby.has.common.util.PlatformName.IBM_JAVA;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ * Factory that creates SSLEngine and SSLSocketFactory instances using
+ * HAS configuration information.
+ * <p/>
+ * It uses a {@link ReloadingX509TrustManager} instance, which reloads public
+ * keys if the truststore file changes.
+ * <p/>
+ * This factory is used to configure HTTPS in HAS HTTP based endpoints, both
+ * client and server.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class SSLFactory implements ConnectionConfigurator {
+
+  @InterfaceAudience.Private
+  public enum Mode {
+    CLIENT, SERVER
+  }
+
+  public static final String SSL_REQUIRE_CLIENT_CERT_KEY =
+    "hadoop.ssl.require.client.CERT";
+  public static final String SSL_HOSTNAME_VERIFIER_KEY =
+    "hadoop.ssl.hostname.verifier";
+  public static final String SSL_CLIENT_CONF_KEY =
+    "hadoop.ssl.client.conf";
+  public static final String SSL_SERVER_CONF_KEY =
+      "hadoop.ssl.server.conf";
+  public static final String SSLCERTIFICATE = IBM_JAVA ? "ibmX509" : "SunX509";
+
+  public static final boolean DEFAULT_SSL_REQUIRE_CLIENT_CERT = false;
+
+  public static final String KEYSTORES_FACTORY_CLASS_KEY =
+    "hadoop.ssl.keystores.factory.class";
+
+  public static final String SSL_ENABLED_PROTOCOLS =
+      "hadoop.ssl.enabled.protocols";
+  public static final String DEFAULT_SSL_ENABLED_PROTOCOLS = "TLSv1";
+
+  private HasConfig conf;
+  private Mode mode;
+  private boolean requireClientCert;
+  private SSLContext context;
+  private HostnameVerifier hostnameVerifier;
+  private KeyStoresFactory keystoresFactory;
+
+  private String[] enabledProtocols = null;
+
+  /**
+   * Creates an SSLFactory.
+   *
+   * @param mode SSLFactory mode, client or server.
+   * @param conf HAS configuration from where the SSLFactory configuration
+   * will be read.
+   */
+  public SSLFactory(Mode mode, HasConfig conf) throws HasException {
+    this.conf = conf;
+    if (mode == null) {
+      throw new IllegalArgumentException("mode cannot be NULL");
+    }
+    this.mode = mode;
+    requireClientCert = conf.getBoolean(SSL_REQUIRE_CLIENT_CERT_KEY,
+                                        DEFAULT_SSL_REQUIRE_CLIENT_CERT);
+    HasConfig sslConf = readSSLConfiguration(mode);
+
+    keystoresFactory = new KeyStoresFactory();
+    keystoresFactory.setConf(sslConf);
+
+    enabledProtocols = new String[] {DEFAULT_SSL_ENABLED_PROTOCOLS};
+  }
+
+  private HasConfig readSSLConfiguration(Mode mode) throws HasException {
+    HasConfig sslConf = new HasConfig();
+    sslConf.setBoolean(SSL_REQUIRE_CLIENT_CERT_KEY, requireClientCert);
+    String sslConfResource;
+    if (mode == Mode.CLIENT) {
+      sslConfResource = conf.getString(SSLFactory.SSL_CLIENT_CONF_KEY);
+    } else {
+      sslConfResource = conf.getString(SSLFactory.SSL_SERVER_CONF_KEY);
+    }
+    try {
+      sslConf.addIniConfig(new File(sslConfResource));
+    } catch (IOException e) {
+      throw new HasException(e);
+    }
+    return sslConf;
+  }
+
+  /**
+   * Initializes the factory.
+   *
+   * @throws  GeneralSecurityException thrown if an SSL initialization error
+   * happened.
+   * @throws IOException thrown if an IO error happened while reading the SSL
+   * configuration.
+   */
+  public void init() throws GeneralSecurityException, IOException {
+    keystoresFactory.init(mode);
+    context = SSLContext.getInstance("TLS");
+    context.init(keystoresFactory.getKeyManagers(),
+                 keystoresFactory.getTrustManagers(), null);
+    context.getDefaultSSLParameters().setProtocols(enabledProtocols);
+    hostnameVerifier = getHostnameVerifier(conf);
+  }
+
+  private HostnameVerifier getHostnameVerifier(HasConfig conf)
+      throws GeneralSecurityException, IOException {
+    return getHostnameVerifier(StringUtils.toUpperCase(
+        conf.getString(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").trim()));
+  }
+
+  public static HostnameVerifier getHostnameVerifier(String verifier)
+    throws GeneralSecurityException, IOException {
+    HostnameVerifier hostnameVerifier;
+    if (verifier.equals("DEFAULT")) {
+      hostnameVerifier = SSLHostnameVerifier.DEFAULT;
+    } else if (verifier.equals("DEFAULT_AND_LOCALHOST")) {
+      hostnameVerifier = SSLHostnameVerifier.DEFAULT_AND_LOCALHOST;
+    } else if (verifier.equals("STRICT")) {
+      hostnameVerifier = SSLHostnameVerifier.STRICT;
+    } else if (verifier.equals("STRICT_IE6")) {
+      hostnameVerifier = SSLHostnameVerifier.STRICT_IE6;
+    } else if (verifier.equals("ALLOW_ALL")) {
+      hostnameVerifier = SSLHostnameVerifier.ALLOW_ALL;
+    } else {
+      throw new GeneralSecurityException("Invalid hostname verifier: "
+          + verifier);
+    }
+    return hostnameVerifier;
+  }
+
+  /**
+   * Releases any resources being used.
+   */
+  public void destroy() {
+    keystoresFactory.destroy();
+  }
+  /**
+   * Returns the SSLFactory KeyStoresFactory instance.
+   *
+   * @return the SSLFactory KeyStoresFactory instance.
+   */
+  public KeyStoresFactory getKeystoresFactory() {
+    return keystoresFactory;
+  }
+
+  /**
+   * Returns a configured SSLEngine.
+   *
+   * @return the configured SSLEngine.
+   * @throws GeneralSecurityException thrown if the SSL engine could not
+   * be initialized.
+   * @throws IOException thrown if an IO error occurred while loading
+   * the server keystore.
+   */
+  public SSLEngine createSSLEngine()
+    throws GeneralSecurityException, IOException {
+    SSLEngine sslEngine = context.createSSLEngine();
+    if (mode == Mode.CLIENT) {
+      sslEngine.setUseClientMode(true);
+    } else {
+      sslEngine.setUseClientMode(false);
+      sslEngine.setNeedClientAuth(requireClientCert);
+    }
+    sslEngine.setEnabledProtocols(enabledProtocols);
+    return sslEngine;
+  }
+
+  /**
+   * Returns a configured SSLServerSocketFactory.
+   *
+   * @return the configured SSLSocketFactory.
+   * @throws GeneralSecurityException thrown if the SSLSocketFactory could not
+   * be initialized.
+   * @throws IOException thrown if an IO error occurred while loading
+   * the server keystore.
+   */
+  public SSLServerSocketFactory createSSLServerSocketFactory()
+    throws GeneralSecurityException, IOException {
+    if (mode != Mode.SERVER) {
+      throw new IllegalStateException("Factory is in CLIENT mode");
+    }
+    return context.getServerSocketFactory();
+  }
+
+  /**
+   * Returns a configured SSLSocketFactory.
+   *
+   * @return the configured SSLSocketFactory.
+   * @throws GeneralSecurityException thrown if the SSLSocketFactory could not
+   * be initialized.
+   * @throws IOException thrown if an IO error occurred while loading
+   * the server keystore.
+   */
+  public SSLSocketFactory createSSLSocketFactory()
+    throws GeneralSecurityException, IOException {
+    if (mode != Mode.CLIENT) {
+      throw new IllegalStateException("Factory is in SERVER mode");
+    }
+    return context.getSocketFactory();
+  }
+
+  /**
+   * Returns the hostname verifier to be used in HttpsURLConnections.
+   *
+   * @return the hostname verifier.
+   */
+  public HostnameVerifier getHostnameVerifier() {
+    if (mode != Mode.CLIENT) {
+      throw new IllegalStateException("Factory is in SERVER mode");
+    }
+    return hostnameVerifier;
+  }
+
+  /**
+   * Returns whether client certificates are required.
+   *
+   * @return true if client certificates are required, false otherwise.
+   */
+  public boolean isClientCertRequired() {
+    return requireClientCert;
+  }
+
+  /**
+   * If the given {@link HttpURLConnection} is an {@link HttpsURLConnection}
+   * configures the connection with the {@link SSLSocketFactory} and
+   * {@link HostnameVerifier} of this SSLFactory, otherwise does nothing.
+   *
+   * @param conn the {@link HttpURLConnection} instance to configure.
+   * @return the configured {@link HttpURLConnection} instance.
+   *
+   * @throws IOException if an IO error occurred.
+   */
+  @Override
+  public HttpURLConnection configure(HttpURLConnection conn)
+    throws IOException {
+    if (conn instanceof HttpsURLConnection) {
+      HttpsURLConnection sslConn = (HttpsURLConnection) conn;
+      try {
+        sslConn.setSSLSocketFactory(createSSLSocketFactory());
+      } catch (GeneralSecurityException ex) {
+        throw new IOException(ex);
+      }
+      sslConn.setHostnameVerifier(getHostnameVerifier());
+      conn = sslConn;
+    }
+    return conn;
+  }
+}
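
For the client side, a hedged sketch of the intended flow: the master HasConfig
points at the client SSL configuration file via SSL_CLIENT_CONF_KEY, and the
factory then decorates HttpsURLConnection instances through configure(). The
file path and URL below are placeholders.

    import org.apache.kerby.has.common.HasConfig;
    import org.apache.kerby.has.common.ssl.SSLFactory;

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class HttpsClientExample {
      public static void main(String[] args) throws Exception {
        HasConfig conf = new HasConfig();
        // Placeholder path to the client SSL configuration file.
        conf.setString(SSLFactory.SSL_CLIENT_CONF_KEY, "/etc/has/ssl-client.conf");
        conf.setString(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT");

        SSLFactory factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
        factory.init();
        try {
          HttpURLConnection conn =
              (HttpURLConnection) new URL("https://localhost:8443/has/v1").openConnection();
          // Installs the client SSLSocketFactory and hostname verifier when the
          // connection is HTTPS; plain HTTP connections pass through unchanged.
          conn = factory.configure(conn);
          System.out.println("Response code: " + conn.getResponseCode());
        } finally {
          factory.destroy();
        }
      }
    }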

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/SSLHostnameVerifier.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/SSLHostnameVerifier.java b/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/SSLHostnameVerifier.java
new file mode 100644
index 0000000..d3be435
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/ssl/SSLHostnameVerifier.java
@@ -0,0 +1,615 @@
+/*
+ * $HeadURL$
+ * $Revision$
+ * $Date$
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ *
+ * This software consists of voluntary contributions made by many
+ * individuals on behalf of the Apache Software Foundation.  For more
+ * information on the Apache Software Foundation, please see
+ * <http://www.apache.org/>.
+ *
+ */
+
+package org.apache.kerby.has.common.ssl;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.cert.Certificate;
+import java.security.cert.CertificateParsingException;
+import java.security.cert.X509Certificate;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+import java.util.StringTokenizer;
+import java.util.TreeSet;
+
+import javax.net.ssl.SSLException;
+import javax.net.ssl.SSLPeerUnverifiedException;
+import javax.net.ssl.SSLSession;
+import javax.net.ssl.SSLSocket;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.kerby.has.common.util.StringUtils;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ ************************************************************************
+ * Copied from the not-yet-commons-ssl project at
+ * http://juliusdavies.ca/commons-ssl/
+ * This project is not yet in Apache, but it is Apache 2.0 licensed.
+ ************************************************************************
+ * Interface for checking if a hostname matches the names stored inside the
+ * server's X.509 certificate.  Correctly implements
+ * javax.net.ssl.HostnameVerifier, but that interface is not recommended.
+ * Instead we added several check() methods that take SSLSocket,
+ * or X509Certificate, or ultimately (they all end up calling this one),
+ * String.  (It's easier to supply JUnit with Strings instead of mock
+ * SSLSession objects!)
+ * </p><p>Our check() methods throw exceptions if the name is
+ * invalid, whereas javax.net.ssl.HostnameVerifier just returns true/false.
+ * <p/>
+ * We provide the HostnameVerifier.DEFAULT, HostnameVerifier.STRICT, and
+ * HostnameVerifier.ALLOW_ALL implementations.  We also provide the more
+ * specialized HostnameVerifier.DEFAULT_AND_LOCALHOST, as well as
+ * HostnameVerifier.STRICT_IE6.  But feel free to define your own
+ * implementations!
+ * <p/>
+ * Inspired by Sebastian Hauer's original StrictSSLProtocolSocketFactory in the
+ * HttpClient "contrib" repository.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
+
+    @Override
+    boolean verify(String host, SSLSession session);
+
+    void check(String host, SSLSocket ssl) throws IOException;
+
+    void check(String host, X509Certificate cert) throws SSLException;
+
+    void check(String host, String[] cns, String[] subjectAlts)
+        throws SSLException;
+
+    void check(String[] hosts, SSLSocket ssl) throws IOException;
+
+    void check(String[] hosts, X509Certificate cert) throws SSLException;
+
+
+    /**
+     * Checks to see if the supplied hostname matches any of the supplied CNs
+     * or "DNS" Subject-Alts.  Most implementations only look at the first CN,
+     * and ignore any additional CNs.  Most implementations do look at all of
+     * the "DNS" Subject-Alts. The CNs or Subject-Alts may contain wildcards
+     * according to RFC 2818.
+     *
+     * @param cns         CN fields, in order, as extracted from the X.509
+     *                    certificate.
+     * @param subjectAlts Subject-Alt fields of type 2 ("DNS"), as extracted
+     *                    from the X.509 certificate.
+     * @param hosts       The array of hostnames to verify.
+     * @throws SSLException If verification failed.
+     */
+    void check(String[] hosts, String[] cns, String[] subjectAlts)
+        throws SSLException;
+
+
+    /**
+     * The DEFAULT HostnameVerifier works the same way as Curl and Firefox.
+     * <p/>
+     * The hostname must match either the first CN, or any of the subject-alts.
+     * A wildcard can occur in the CN, and in any of the subject-alts.
+     * <p/>
+     * The only difference between DEFAULT and STRICT is that a wildcard (such
+     * as "*.foo.com") with DEFAULT matches all subdomains, including
+     * "a.b.foo.com".
+     */
+    SSLHostnameVerifier DEFAULT =
+        new AbstractVerifier() {
+            @Override
+            public final void check(final String[] hosts, final String[] cns,
+                                    final String[] subjectAlts)
+                throws SSLException {
+                check(hosts, cns, subjectAlts, false, false);
+            }
+
+            @Override
+            public final String toString() {
+                return "DEFAULT";
+            }
+        };
+
+
+    /**
+     * The DEFAULT_AND_LOCALHOST HostnameVerifier works like the DEFAULT
+     * one with one additional relaxation:  a host of "localhost",
+     * "localhost.localdomain", "127.0.0.1", "::1" will always pass, no matter
+     * what is in the server's certificate.
+     */
+    SSLHostnameVerifier DEFAULT_AND_LOCALHOST =
+        new AbstractVerifier() {
+            @Override
+            public final void check(final String[] hosts, final String[] cns,
+                                    final String[] subjectAlts)
+                throws SSLException {
+                if (isLocalhost(hosts[0])) {
+                    return;
+                }
+                check(hosts, cns, subjectAlts, false, false);
+            }
+
+            @Override
+            public final String toString() {
+                return "DEFAULT_AND_LOCALHOST";
+            }
+        };
+
+    /**
+     * The STRICT HostnameVerifier works the same way as java.net.URL in Sun
+     * Java 1.4, Sun Java 5, Sun Java 6.  It's also pretty close to IE6.
+     * This implementation appears to be compliant with RFC 2818 for dealing
+     * with wildcards.
+     * <p/>
+     * The hostname must match either the first CN, or any of the subject-alts.
+     * A wildcard can occur in the CN, and in any of the subject-alts.  The
+     * one divergence from IE6 is how we only check the first CN.  IE6 allows
+     * a match against any of the CNs present.  We decided to follow in
+     * Sun Java 1.4's footsteps and only check the first CN.
+     * <p/>
+     * A wildcard such as "*.foo.com" matches only subdomains in the same
+     * level, for example "a.foo.com".  It does not match deeper subdomains
+     * such as "a.b.foo.com".
+     */
+    SSLHostnameVerifier STRICT =
+        new AbstractVerifier() {
+            @Override
+            public final void check(final String[] host, final String[] cns,
+                                    final String[] subjectAlts)
+                throws SSLException {
+                check(host, cns, subjectAlts, false, true);
+            }
+
+            @Override
+            public final String toString() {
+                return "STRICT";
+            }
+        };
+
+    /**
+     * The STRICT_IE6 HostnameVerifier works just like the STRICT one with one
+     * minor variation:  the hostname can match against any of the CN's in the
+     * server's certificate, not just the first one.  This behaviour is
+     * identical to IE6's behaviour.
+     */
+    SSLHostnameVerifier STRICT_IE6 =
+        new AbstractVerifier() {
+            @Override
+            public final void check(final String[] host, final String[] cns,
+                                    final String[] subjectAlts)
+                throws SSLException {
+                check(host, cns, subjectAlts, true, true);
+            }
+
+            @Override
+            public final String toString() {
+                return "STRICT_IE6";
+            }
+        };
+
+    /**
+     * The ALLOW_ALL HostnameVerifier essentially turns hostname verification
+     * off.  This implementation is a no-op, and never throws the SSLException.
+     */
+    SSLHostnameVerifier ALLOW_ALL =
+        new AbstractVerifier() {
+            @Override
+            public final void check(final String[] host, final String[] cns,
+                                    final String[] subjectAlts) {
+                // Allow everything - so never blowup.
+            }
+
+            @Override
+            public final String toString() {
+                return "ALLOW_ALL";
+            }
+        };
+
+    abstract class AbstractVerifier implements SSLHostnameVerifier {
+
+        /**
+         * This contains a list of 2nd-level domains that aren't allowed to
+         * have wildcards when combined with country-codes.
+         * For example: [*.co.uk].
+         * <p/>
+         * The [*.co.uk] problem is an interesting one.  Should we just hope
+         * that CA's would never foolishly allow such a certificate to happen?
+         * Looks like we're the only implementation guarding against this.
+         * Firefox, Curl, Sun Java 1.4, 5, 6 don't bother with this check.
+         */
+        private static final String[] BAD_COUNTRY_2LDS =
+            {"ac", "co", "com", "ed", "edu", "go", "gouv", "gov", "info",
+                "lg", "ne", "net", "or", "org"};
+
+        private static final String[] LOCALHOSTS = {"::1", "127.0.0.1",
+            "localhost",
+            "localhost.localdomain"};
+
+
+        static {
+            // Just in case developer forgot to manually sort the array.  :-)
+            Arrays.sort(BAD_COUNTRY_2LDS);
+            Arrays.sort(LOCALHOSTS);
+        }
+
+        protected AbstractVerifier() {
+        }
+
+        /**
+         * The javax.net.ssl.HostnameVerifier contract.
+         *
+         * @param host    'hostname' we used to create our socket
+         * @param session SSLSession with the remote server
+         * @return true if the host matched the one in the certificate.
+         */
+        @Override
+        public boolean verify(String host, SSLSession session) {
+            try {
+                Certificate[] certs = session.getPeerCertificates();
+                X509Certificate x509 = (X509Certificate) certs[0];
+                check(new String[]{host}, x509);
+                return true;
+            } catch (SSLException e) {
+                return false;
+            }
+        }
+
+        @Override
+        public void check(String host, SSLSocket ssl) throws IOException {
+            check(new String[]{host}, ssl);
+        }
+
+        @Override
+        public void check(String host, X509Certificate cert)
+            throws SSLException {
+            check(new String[]{host}, cert);
+        }
+
+        @Override
+        public void check(String host, String[] cns, String[] subjectAlts)
+            throws SSLException {
+            check(new String[]{host}, cns, subjectAlts);
+        }
+
+        @Override
+        public void check(String[] host, SSLSocket ssl)
+            throws IOException {
+            if (host == null) {
+                throw new NullPointerException("host to verify is null");
+            }
+
+            SSLSession session = ssl.getSession();
+            if (session == null) {
+                // In our experience this only happens under IBM 1.4.x when
+                // spurious (unrelated) certificates show up in the server's
+                // chain.  Hopefully this will unearth the real problem:
+                InputStream in = ssl.getInputStream();
+                in.available();
+                /*
+                  If you're looking at the 2 lines of code above because
+                  you're running into a problem, you probably have two
+                  options:
+
+                    #1.  Clean up the certificate chain that your server
+                         is presenting (e.g. edit "/etc/apache2/server.crt"
+                         or wherever it is your server's certificate chain
+                         is defined).
+
+                                               OR
+
+                    #2.   Upgrade to an IBM 1.5.x or greater JVM, or switch
+                          to a non-IBM JVM.
+                */
+
+                // If ssl.getInputStream().available() didn't cause an
+                // exception, maybe at least now the session is available?
+                session = ssl.getSession();
+                if (session == null) {
+                    // If it's still null, probably a startHandshake() will
+                    // unearth the real problem.
+                    ssl.startHandshake();
+
+                    // Okay, if we still haven't managed to cause an exception,
+                    // might as well go for the NPE.  Or maybe we're okay now?
+                    session = ssl.getSession();
+                }
+            }
+            Certificate[] certs;
+            try {
+                certs = session.getPeerCertificates();
+            } catch (SSLPeerUnverifiedException spue) {
+                InputStream in = ssl.getInputStream();
+                in.available();
+                // Didn't trigger anything interesting?  Okay, just throw
+                // original.
+                throw spue;
+            }
+            X509Certificate x509 = (X509Certificate) certs[0];
+            check(host, x509);
+        }
+
+        @Override
+        public void check(String[] host, X509Certificate cert)
+            throws SSLException {
+            String[] cns = Certificates.getCNs(cert);
+            String[] subjectAlts = Certificates.getDNSSubjectAlts(cert);
+            check(host, cns, subjectAlts);
+        }
+
+        public void check(final String[] hosts, final String[] cns,
+                          final String[] subjectAlts, final boolean ie6,
+                          final boolean strictWithSubDomains)
+            throws SSLException {
+            // Build up lists of allowed hosts For logging/debugging purposes.
+            StringBuffer buf = new StringBuffer(32);
+            buf.append('<');
+            for (int i = 0; i < hosts.length; i++) {
+                String h = hosts[i];
+                h = h != null ? StringUtils.toLowerCase(h.trim()) : "";
+                hosts[i] = h;
+                if (i > 0) {
+                    buf.append('/');
+                }
+                buf.append(h);
+            }
+            buf.append('>');
+            String hostnames = buf.toString();
+            // Build the list of names we're going to check.  Our DEFAULT and
+            // STRICT implementations of the HostnameVerifier only use the
+            // first CN provided.  All other CNs are ignored.
+            // (Firefox, wget, curl, Sun Java 1.4, 5, 6 all work this way).
+            final Set<String> names = new TreeSet<String>();
+            if (cns != null && cns.length > 0 && cns[0] != null) {
+                names.add(cns[0]);
+                if (ie6) {
+                    for (int i = 1; i < cns.length; i++) {
+                        names.add(cns[i]);
+                    }
+                }
+            }
+            if (subjectAlts != null) {
+                for (int i = 0; i < subjectAlts.length; i++) {
+                    if (subjectAlts[i] != null) {
+                        names.add(subjectAlts[i]);
+                    }
+                }
+            }
+            if (names.isEmpty()) {
+                String msg = "Certificate for " + hosts[0] + " doesn't contain CN or DNS subjectAlt";
+                throw new SSLException(msg);
+            }
+
+            // StringBuffer for building the error message.
+            buf = new StringBuffer();
+
+            boolean match = false;
+            out:
+            for (Iterator<String> it = names.iterator(); it.hasNext();) {
+                // Don't trim the CN, though!
+                final String cn = StringUtils.toLowerCase(it.next());
+                // Store CN in StringBuffer in case we need to report an error.
+                buf.append(" <");
+                buf.append(cn);
+                buf.append('>');
+                if (it.hasNext()) {
+                    buf.append(" OR");
+                }
+
+                // The CN better have at least two dots if it wants wildcard
+                // action.  It also can't be [*.co.uk] or [*.co.jp] or
+                // [*.org.uk], etc...
+                boolean doWildcard = cn.startsWith("*.")
+                    && cn.lastIndexOf('.') >= 0
+                    && !isIP4Address(cn)
+                    && acceptableCountryWildcard(cn);
+
+                for (int i = 0; i < hosts.length; i++) {
+                    final String hostName =
+                        StringUtils.toLowerCase(hosts[i].trim());
+                    if (doWildcard) {
+                        match = hostName.endsWith(cn.substring(1));
+                        if (match && strictWithSubDomains) {
+                            // If we're in strict mode, then [*.foo.com] is not
+                            // allowed to match [a.b.foo.com]
+                            match = countDots(hostName) == countDots(cn);
+                        }
+                    } else {
+                        match = hostName.equals(cn);
+                    }
+                    if (match) {
+                        break out;
+                    }
+                }
+            }
+            if (!match) {
+                throw new SSLException("hostname in certificate didn't match: " + hostnames + " !=" + buf);
+            }
+        }
+
+        public static boolean isIP4Address(final String cn) {
+            boolean isIP4 = true;
+            String tld = cn;
+            int x = cn.lastIndexOf('.');
+            // We only bother analyzing the characters after the final dot
+            // in the name.
+            if (x >= 0 && x + 1 < cn.length()) {
+                tld = cn.substring(x + 1);
+            }
+            for (int i = 0; i < tld.length(); i++) {
+                if (!Character.isDigit(tld.charAt(i))) {
+                    isIP4 = false;
+                    break;
+                }
+            }
+            return isIP4;
+        }
+
+        public static boolean acceptableCountryWildcard(final String cn) {
+            int cnLen = cn.length();
+            if (cnLen >= 7 && cnLen <= 9) {
+                // Look for the '.' in the 3rd-last position:
+                if (cn.charAt(cnLen - 3) == '.') {
+                    // Trim off the [*.] and the [.XX].
+                    String s = cn.substring(2, cnLen - 3);
+                    // And test against the sorted array of bad 2lds:
+                    int x = Arrays.binarySearch(BAD_COUNTRY_2LDS, s);
+                    return x < 0;
+                }
+            }
+            return true;
+        }
+
+        public static boolean isLocalhost(String host) {
+            host = host != null ? StringUtils.toLowerCase(host.trim()) : "";
+            if (host.startsWith("::1")) {
+                int x = host.lastIndexOf('%');
+                if (x >= 0) {
+                    host = host.substring(0, x);
+                }
+            }
+            int x = Arrays.binarySearch(LOCALHOSTS, host);
+            return x >= 0;
+        }
+
+        /**
+         * Counts the number of dots "." in a string.
+         *
+         * @param s string to count dots from
+         * @return number of dots
+         */
+        public static int countDots(final String s) {
+            int count = 0;
+            for (int i = 0; i < s.length(); i++) {
+                if (s.charAt(i) == '.') {
+                    count++;
+                }
+            }
+            return count;
+        }
+    }
+
+    class Certificates {
+      public static String[] getCNs(X509Certificate cert) {
+        final List<String> cnList = new LinkedList<String>();
+        /*
+          Sebastian Hauer's original StrictSSLProtocolSocketFactory used
+          getName() and had the following comment:
+
+             Parses a X.500 distinguished name for the value of the
+             "Common Name" field.  This is done a bit sloppy right
+             now and should probably be done a bit more according to
+             <code>RFC 2253</code>.
+
+           I've noticed that toString() seems to do a better job than
+           getName() on these X500Principal objects, so I'm hoping that
+           addresses Sebastian's concern.
+
+           For example, getName() gives me this:
+           1.2.840.113549.1.9.1=#16166a756c6975736461766965734063756362632e636f6d
+
+           whereas toString() gives me this:
+           EMAILADDRESS=juliusdavies@cucbc.com
+
+           Looks like toString() even works with non-ascii domain names!
+           I tested it with "&#x82b1;&#x5b50;.co.jp" and it worked fine.
+          */
+        String subjectPrincipal = cert.getSubjectX500Principal().toString();
+        StringTokenizer st = new StringTokenizer(subjectPrincipal, ",");
+        while (st.hasMoreTokens()) {
+            String tok = st.nextToken();
+            int x = tok.indexOf("CN=");
+            if (x >= 0) {
+                cnList.add(tok.substring(x + 3));
+            }
+        }
+        if (!cnList.isEmpty()) {
+            String[] cns = new String[cnList.size()];
+            cnList.toArray(cns);
+            return cns;
+        } else {
+            return null;
+        }
+      }
+
+
+      /**
+       * Extracts the array of SubjectAlt DNS names from an X509Certificate.
+       * Returns null if there aren't any.
+       * <p/>
+       * Note:  Java doesn't appear able to extract international characters
+       * from the SubjectAlts.  It can only extract international characters
+       * from the CN field.
+       * <p/>
+       * (Or maybe the version of OpenSSL I'm using to test isn't storing the
+       * international characters correctly in the SubjectAlts?).
+       *
+       * @param cert X509Certificate
+       * @return Array of SubjectALT DNS names stored in the certificate.
+       */
+      public static String[] getDNSSubjectAlts(X509Certificate cert) {
+          final List<String> subjectAltList = new LinkedList<String>();
+          Collection<List<?>> c = null;
+          try {
+              c = cert.getSubjectAlternativeNames();
+          } catch (CertificateParsingException cpe) {
+              // Should probably log.debug() this?
+              cpe.printStackTrace();
+          }
+          if (c != null) {
+              Iterator<List<?>> it = c.iterator();
+              while (it.hasNext()) {
+                  List<?> list = it.next();
+                  int type = ((Integer) list.get(0)).intValue();
+                  // If type is 2, then we've got a dNSName
+                  if (type == 2) {
+                      String s = (String) list.get(1);
+                      subjectAltList.add(s);
+                  }
+              }
+          }
+          if (!subjectAltList.isEmpty()) {
+              String[] subjectAlts = new String[subjectAltList.size()];
+              subjectAltList.toArray(subjectAlts);
+              return subjectAlts;
+          } else {
+              return null;
+          }
+      }
+    }
+
+}

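A minimal usage sketch of the Certificates helper above: it gathers every name a server certificate can match, combining the CN fields with the dNSName SubjectAlts. The X509Certificate instance ("cert") and the surrounding verification code are assumptions for illustration, not part of this commit.

    // Illustrative only: "cert" is assumed to be an X509Certificate taken
    // from the TLS session being verified.
    String[] cns = Certificates.getCNs(cert);               // may be null
    String[] alts = Certificates.getDNSSubjectAlts(cert);   // may be null
    List<String> names = new ArrayList<String>();
    if (cns != null) {
        names.addAll(Arrays.asList(cns));
    }
    if (alts != null) {
        names.addAll(Arrays.asList(alts));
    }
    // "names" now holds every host name the certificate legitimately claims.
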
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/util/ConnectionConfigurator.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/util/ConnectionConfigurator.java b/has/has-common/src/main/java/org/apache/kerby/has/common/util/ConnectionConfigurator.java
new file mode 100644
index 0000000..c913e59
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/util/ConnectionConfigurator.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.kerby.has.common.util;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+
+/**
+ * Borrow the class from Apache Hadoop
+ */
+
+/**
+ * Interface to configure  {@link HttpURLConnection} created by
+ * {@link org.apache.kerby.has.common.spnego.AuthenticatedURL} instances.
+ */
+public interface ConnectionConfigurator {
+
+  /**
+   * Configures the given {@link HttpURLConnection} instance.
+   *
+   * @param conn the {@link HttpURLConnection} instance to configure.
+   * @return the configured {@link HttpURLConnection} instance.
+   * 
+   * @throws IOException if an IO error occurred.
+   */
+  HttpURLConnection configure(HttpURLConnection conn) throws IOException;
+
+}

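A minimal sketch of implementing the ConnectionConfigurator interface above; the one-minute timeout value is an assumption for illustration.

    // Illustrative configurator that only applies timeouts to the connection.
    ConnectionConfigurator timeoutConfigurator = new ConnectionConfigurator() {
        @Override
        public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
            conn.setConnectTimeout(60 * 1000); // assumed one-minute connect timeout
            conn.setReadTimeout(60 * 1000);    // assumed one-minute read timeout
            return conn;
        }
    };
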
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/util/HasJaasLoginUtil.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/util/HasJaasLoginUtil.java b/has/has-common/src/main/java/org/apache/kerby/has/common/util/HasJaasLoginUtil.java
new file mode 100644
index 0000000..57f57f3
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/util/HasJaasLoginUtil.java
@@ -0,0 +1,261 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.common.util;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosPrincipal;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
+import java.io.File;
+import java.io.IOException;
+import java.security.Principal;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * JAAS utilities for Has login.
+ */
+public class HasJaasLoginUtil {
+    public static final Logger LOG = LoggerFactory.getLogger(HasJaasLoginUtil.class);
+
+    public static final boolean ENABLE_DEBUG = true;
+
+    private static String getKrb5LoginModuleName() {
+        return System.getProperty("java.vendor").contains("IBM")
+            ? "com.ibm.security.auth.module.Krb5LoginModule"
+            : "org.apache.kerby.has.client.HasLoginModule";
+    }
+
+    /**
+     * Log a user in from a tgt ticket.
+     *
+     * @throws IOException
+     */
+    public static synchronized Subject loginUserFromTgtTicket(String hadoopSecurityHas) throws IOException {
+
+        TICKET_KERBEROS_OPTIONS.put("hadoopSecurityHas", hadoopSecurityHas);
+        Subject subject = new Subject();
+        Configuration conf = new HasJaasConf();
+        String confName = "ticket-kerberos";
+        LoginContext loginContext = null;
+        try {
+            loginContext = new LoginContext(confName, subject, null, conf);
+        } catch (LoginException e) {
+            throw new IOException("Fail to create LoginContext for " + e);
+        }
+        try {
+            loginContext.login();
+            LOG.info("Login successful for user "
+                + subject.getPrincipals().iterator().next().getName());
+        } catch (LoginException e) {
+            throw new IOException("Login failure for " + e);
+        }
+        return loginContext.getSubject();
+    }
+
+    /**
+     * Has Jaas config.
+     */
+    static class HasJaasConf extends Configuration {
+        @Override
+        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+
+            return new AppConfigurationEntry[]{
+                TICKET_KERBEROS_LOGIN};
+        }
+    }
+
+    private static final Map<String, String> BASIC_JAAS_OPTIONS =
+        new HashMap<String, String>();
+
+    static {
+        String jaasEnvVar = System.getenv("HADOOP_JAAS_DEBUG");
+        if (jaasEnvVar != null && "true".equalsIgnoreCase(jaasEnvVar)) {
+            BASIC_JAAS_OPTIONS.put("debug", String.valueOf(ENABLE_DEBUG));
+        }
+    }
+
+    private static final Map<String, String> TICKET_KERBEROS_OPTIONS =
+        new HashMap<String, String>();
+
+    static {
+        TICKET_KERBEROS_OPTIONS.put("doNotPrompt", "true");
+        TICKET_KERBEROS_OPTIONS.put("useTgtTicket", "true");
+        TICKET_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
+    }
+
+    private static final AppConfigurationEntry TICKET_KERBEROS_LOGIN =
+        new AppConfigurationEntry(getKrb5LoginModuleName(),
+            AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL,
+            TICKET_KERBEROS_OPTIONS);
+
+
+    public static Subject loginUsingTicketCache(
+        String principal, File cacheFile) throws IOException {
+        Set<Principal> principals = new HashSet<Principal>();
+        principals.add(new KerberosPrincipal(principal));
+
+        Subject subject = new Subject(false, principals,
+            new HashSet<Object>(), new HashSet<Object>());
+
+        Configuration conf = useTicketCache(principal, cacheFile);
+        String confName = "TicketCacheConf";
+        LoginContext loginContext = null;
+        try {
+            loginContext = new LoginContext(confName, subject, null, conf);
+        } catch (LoginException e) {
+            throw new IOException("Fail to create LoginContext for " + e);
+        }
+        try {
+            loginContext.login();
+            LOG.info("Login successful for user "
+                + subject.getPrincipals().iterator().next().getName());
+        } catch (LoginException e) {
+            throw new IOException("Login failure for " + e);
+        }
+        return loginContext.getSubject();
+    }
+
+    public static Subject loginUsingKeytab(
+        String principal, File keytabFile) throws IOException {
+        Set<Principal> principals = new HashSet<Principal>();
+        principals.add(new KerberosPrincipal(principal));
+
+        Subject subject = new Subject(false, principals,
+            new HashSet<Object>(), new HashSet<Object>());
+
+        Configuration conf = useKeytab(principal, keytabFile);
+        String confName = "KeytabConf";
+        LoginContext loginContext = null;
+        try {
+            loginContext = new LoginContext(confName, subject, null, conf);
+        } catch (LoginException e) {
+            throw new IOException("Fail to create LoginContext for " + e);
+        }
+        try {
+            loginContext.login();
+             LOG.info("Login successful for user "
+                + subject.getPrincipals().iterator().next().getName());
+        } catch (LoginException e) {
+            throw new IOException("Login failure for " + e);
+        }
+        return loginContext.getSubject();
+    }
+
+    public static LoginContext loginUsingKeytabReturnContext(
+        String principal, File keytabFile) throws IOException {
+        Set<Principal> principals = new HashSet<Principal>();
+        principals.add(new KerberosPrincipal(principal));
+
+        Subject subject = new Subject(false, principals,
+            new HashSet<Object>(), new HashSet<Object>());
+
+        Configuration conf = useKeytab(principal, keytabFile);
+        String confName = "KeytabConf";
+        LoginContext loginContext = null;
+        try {
+            loginContext = new LoginContext(confName, subject, null, conf);
+        } catch (LoginException e) {
+            throw new IOException("Fail to create LoginContext for " + e);
+        }
+        try {
+            loginContext.login();
+            LOG.info("Login successful for user "
+                + subject.getPrincipals().iterator().next().getName());
+        } catch (LoginException e) {
+            throw new IOException("Login failure for " + e);
+        }
+        return loginContext;
+    }
+
+    public static Configuration useTicketCache(String principal,
+                                               File credentialFile) {
+        return new TicketCacheJaasConf(principal, credentialFile);
+    }
+
+    public static Configuration useKeytab(String principal, File keytabFile) {
+        return new KeytabJaasConf(principal, keytabFile);
+    }
+
+    static class TicketCacheJaasConf extends Configuration {
+        private String principal;
+        private File clientCredentialFile;
+
+        TicketCacheJaasConf(String principal, File clientCredentialFile) {
+            this.principal = principal;
+            this.clientCredentialFile = clientCredentialFile;
+        }
+
+        @Override
+        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+            Map<String, String> options = new HashMap<String, String>();
+            options.put("principal", principal);
+            options.put("storeKey", "false");
+            options.put("doNotPrompt", "false");
+            options.put("useTicketCache", "true");
+            options.put("renewTGT", "true");
+            options.put("refreshKrb5Config", "true");
+            options.put("isInitiator", "true");
+            options.put("ticketCache", clientCredentialFile.getAbsolutePath());
+            options.putAll(BASIC_JAAS_OPTIONS);
+
+            return new AppConfigurationEntry[]{
+                new AppConfigurationEntry(getKrb5LoginModuleName(),
+                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                    options)};
+        }
+    }
+
+    static class KeytabJaasConf extends Configuration {
+        private String principal;
+        private File keytabFile;
+
+        KeytabJaasConf(String principal, File keytab) {
+            this.principal = principal;
+            this.keytabFile = keytab;
+        }
+
+        @Override
+        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+            Map<String, String> options = new HashMap<String, String>();
+            options.put("keyTab", keytabFile.getAbsolutePath());
+            options.put("principal", principal);
+            options.put("useKeyTab", "true");
+            options.put("storeKey", "true");
+            options.put("doNotPrompt", "true");
+            options.put("renewTGT", "false");
+            options.put("refreshKrb5Config", "true");
+            options.put("isInitiator", "true");
+            options.putAll(BASIC_JAAS_OPTIONS);
+
+            return new AppConfigurationEntry[]{
+                new AppConfigurationEntry(getKrb5LoginModuleName(),
+                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                    options)};
+        }
+    }
+}

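A minimal sketch of the keytab login path in HasJaasLoginUtil above; the principal name and keytab path are placeholders, not values used anywhere in this commit.

    // Illustrative only: obtain a JAAS Subject from a keytab.
    Subject subject = HasJaasLoginUtil.loginUsingKeytab(
        "hdfs/host1.example.com@EXAMPLE.COM",               // placeholder principal
        new File("/etc/security/keytabs/hdfs.keytab"));     // placeholder keytab path
    System.out.println("Logged in as: "
        + subject.getPrincipals().iterator().next().getName());
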
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/util/HasUtil.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/util/HasUtil.java b/has/has-common/src/main/java/org/apache/kerby/has/common/util/HasUtil.java
new file mode 100644
index 0000000..03a9d9d
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/util/HasUtil.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.common.util;
+
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.crypto.EncryptionHandler;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.PrintStream;
+
+public class HasUtil {
+
+    public static EncryptionKey getClientKey(String userName, String passPhrase,
+                                             EncryptionType type) throws KrbException {
+        EncryptionKey clientKey = EncryptionHandler.string2Key(userName,
+            passPhrase, type);
+        return clientKey;
+    }
+
+    /**
+     * Get the HAS configuration.
+     * @param hasConfFile the HAS configuration file
+     * @return the HAS configuration, or null if the file does not exist
+     * @throws HasException when the configuration file cannot be loaded
+     */
+    public static HasConfig getHasConfig(File hasConfFile) throws HasException {
+
+        if (hasConfFile.exists()) {
+            HasConfig hasConfig = new HasConfig();
+            try {
+                hasConfig.addIniConfig(hasConfFile);
+            } catch (IOException e) {
+                throw new HasException("Can not load the has configuration file "
+                    + hasConfFile.getAbsolutePath());
+            }
+            return hasConfig;
+        }
+
+        return null;
+    }
+
+    public static void setEnableConf(File hasConfFile, String value)
+            throws HasException, IOException {
+        String oldValue = getHasConfig(hasConfFile).getEnableConf();
+        if (oldValue == null) {
+            throw new HasException("Please set enable_conf in has-server.conf.");
+        }
+        if (oldValue.equals(value)) {
+            return;
+        }
+        try {
+            BufferedReader bf = new BufferedReader(new FileReader(hasConfFile));
+            StringBuilder sb = new StringBuilder();
+            String tempString;
+            while ((tempString = bf.readLine()) != null) {
+                if (tempString.trim().startsWith("enable_conf")) {
+                    tempString = tempString.replace(oldValue, value);
+                }
+                sb.append(tempString + "\n");
+            }
+            PrintStream ps = new PrintStream(new FileOutputStream(hasConfFile));
+            ps.print(sb.toString());
+            bf.close();
+        } catch (FileNotFoundException e) {
+            throw new HasException("Can not load the has configuration file "
+                    + hasConfFile.getAbsolutePath());
+        }
+    }
+}

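A minimal sketch of loading a HAS configuration through HasUtil.getHasConfig above; the conf path is a placeholder. Note that the method returns null when the file does not exist, so callers need a null check.

    // Illustrative only: load a HAS conf file and handle the missing-file case.
    File confFile = new File("/etc/has/has-client.conf");   // placeholder path
    HasConfig hasConfig = HasUtil.getHasConfig(confFile);
    if (hasConfig == null) {
        System.err.println("No HAS configuration found at " + confFile.getAbsolutePath());
    }
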
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/util/PlatformName.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/util/PlatformName.java b/has/has-common/src/main/java/org/apache/kerby/has/common/util/PlatformName.java
new file mode 100644
index 0000000..8a2c961
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/util/PlatformName.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kerby.has.common.util;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Borrow the class from Apache Hadoop
+ */
+
+/**
+ * A helper class for getting build-info of the java-vm.
+ *
+ */
+@InterfaceAudience.LimitedPrivate({"HBase"})
+@InterfaceStability.Unstable
+public class PlatformName {
+  /**
+   * The complete platform 'name' to identify the platform as
+   * per the java-vm.
+   */
+  public static final String PLATFORM_NAME =
+      (System.getProperty("os.name").startsWith("Windows")
+      ? System.getenv("os") : System.getProperty("os.name"))
+      + "-" + System.getProperty("os.arch")
+      + "-" + System.getProperty("sun.arch.data.model");
+
+  /**
+   * The java vendor name used in this platform.
+   */
+  public static final String JAVA_VENDOR_NAME = System.getProperty("java.vendor");
+
+  /**
+   * A public static variable to indicate the current java vendor is
+   * IBM java or not.
+   */
+  public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM");
+
+  public static void main(String[] args) {
+    System.out.println(PLATFORM_NAME);
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/util/StringUtils.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/util/StringUtils.java b/has/has-common/src/main/java/org/apache/kerby/has/common/util/StringUtils.java
new file mode 100644
index 0000000..b9c323d
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/util/StringUtils.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kerby.has.common.util;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import java.util.Locale;
+
+/**
+ * General string utils
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class StringUtils {
+
+  /**
+   * Converts all of the characters in this String to lower case with
+   * Locale.ENGLISH.
+   *
+   * @param str  string to be converted
+   * @return     the str, converted to lowercase.
+   */
+  public static String toLowerCase(String str) {
+    return str.toLowerCase(Locale.ENGLISH);
+  }
+
+  /**
+   * Converts all of the characters in this String to upper case with
+   * Locale.ENGLISH.
+   *
+   * @param str  string to be converted
+   * @return     the str, converted to uppercase.
+   */
+  public static String toUpperCase(String str) {
+    return str.toUpperCase(Locale.ENGLISH);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/util/URLConnectionFactory.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/util/URLConnectionFactory.java b/has/has-common/src/main/java/org/apache/kerby/has/common/util/URLConnectionFactory.java
new file mode 100644
index 0000000..d3a61cf
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/util/URLConnectionFactory.java
@@ -0,0 +1,215 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kerby.has.common.util;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.spnego.AuthenticatedURL;
+import org.apache.kerby.has.common.spnego.AuthenticationException;
+import org.apache.kerby.has.common.spnego.KerberosHasAuthenticator;
+import org.apache.kerby.has.common.ssl.SSLFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.SSLSocketFactory;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.net.URLConnection;
+import java.security.GeneralSecurityException;
+
+/**
+ * Borrow the class from Apache Hadoop
+ */
+
+/**
+ * Utilities for handling URLs
+ */
+@InterfaceStability.Unstable
+public class URLConnectionFactory {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(URLConnectionFactory.class);
+
+  /**
+   * Timeout for socket connects and reads
+   */
+   // 1 minute
+  public static final int DEFAULT_SOCKET_TIMEOUT = 60 * 1000;
+  private final ConnectionConfigurator connConfigurator;
+
+  private static final ConnectionConfigurator DEFAULT_TIMEOUT_CONN_CONFIGURATOR
+      = new ConnectionConfigurator() {
+        @Override
+        public HttpURLConnection configure(HttpURLConnection conn)
+            throws IOException {
+          URLConnectionFactory.setTimeouts(conn,
+                                           DEFAULT_SOCKET_TIMEOUT,
+                                           DEFAULT_SOCKET_TIMEOUT);
+          return conn;
+        }
+      };
+
+  /**
+   * A URLConnectionFactory that sets the default timeouts and only trusts
+   * Java's built-in SSL certificates.
+   */
+  public static final URLConnectionFactory DEFAULT_SYSTEM_CONNECTION_FACTORY =
+      new URLConnectionFactory(DEFAULT_TIMEOUT_CONN_CONFIGURATOR);
+
+  /**
+   * Construct a new URLConnectionFactory based on the configuration. It will
+   * try to load SSL certificates when it is specified.
+   */
+  public static URLConnectionFactory newDefaultURLConnectionFactory(HasConfig conf) {
+    ConnectionConfigurator conn = null;
+    try {
+      conn = newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf);
+    } catch (Exception e) {
+      LOG.debug(
+          "Cannot load customized ssl related configuration. Fallback to system-generic settings.",
+          e);
+      conn = DEFAULT_TIMEOUT_CONN_CONFIGURATOR;
+    }
+    return new URLConnectionFactory(conn);
+  }
+
+  private static ConnectionConfigurator getSSLConnectionConfiguration(
+      HasConfig conf) {
+    ConnectionConfigurator conn;
+    try {
+      conn = newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf);
+    } catch (Exception e) {
+      LOG.warn(
+          "Cannot load customized ssl related configuration. Fallback to"
+              + " system-generic settings.",
+          e);
+      conn = DEFAULT_TIMEOUT_CONN_CONFIGURATOR;
+    }
+
+    return conn;
+  }
+
+  @VisibleForTesting
+  URLConnectionFactory(ConnectionConfigurator connConfigurator) {
+    this.connConfigurator = connConfigurator;
+  }
+
+  /**
+   * Create a new ConnectionConfigurator for SSL connections
+   */
+  private static ConnectionConfigurator newSslConnConfigurator(
+      final int defaultTimeout, HasConfig conf)
+      throws IOException, GeneralSecurityException, HasException {
+    final SSLFactory factory;
+    final SSLSocketFactory sf;
+    final HostnameVerifier hv;
+    final int connectTimeout;
+    final int readTimeout;
+
+    factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
+    factory.init();
+    sf = factory.createSSLSocketFactory();
+    hv = factory.getHostnameVerifier();
+
+    connectTimeout = defaultTimeout;
+
+    readTimeout = defaultTimeout;
+
+    return new ConnectionConfigurator() {
+      @Override
+      public HttpURLConnection configure(HttpURLConnection conn)
+          throws IOException {
+        if (conn instanceof HttpsURLConnection) {
+          HttpsURLConnection c = (HttpsURLConnection) conn;
+          c.setSSLSocketFactory(sf);
+          c.setHostnameVerifier(hv);
+        }
+        URLConnectionFactory.setTimeouts(conn, connectTimeout, readTimeout);
+        return conn;
+      }
+    };
+  }
+
+  /**
+   * Opens a url with read and connect timeouts
+   *
+   * @param url
+   *          to open
+   * @return URLConnection
+   * @throws IOException
+   */
+  public URLConnection openConnection(URL url) throws IOException {
+    try {
+      return openConnection(url, false, null);
+    } catch (AuthenticationException e) {
+      // Unreachable
+      LOG.error("Open connection {} failed", url, e);
+      return null;
+    }
+  }
+
+  /**
+   * Opens a url with read and connect timeouts
+   *
+   * @param url
+   *          URL to open
+   * @param isSpnego
+   *          whether the url should be authenticated via SPNEGO
+   * @return URLConnection
+   * @throws IOException
+   * @throws AuthenticationException
+   */
+  public URLConnection openConnection(URL url, boolean isSpnego, HasConfig hasConfig)
+      throws IOException, AuthenticationException {
+    if (isSpnego && (hasConfig != null)) {
+      LOG.debug("open AuthenticatedURL connection {}", url);
+//      UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab();
+      final AuthenticatedURL.Token authToken = new AuthenticatedURL.Token();
+      return new AuthenticatedURL(new KerberosHasAuthenticator(hasConfig.getAdminKeytab(),
+          hasConfig.getAdminKeytabPrincipal()),
+          connConfigurator).openConnection(url, authToken);
+    } else {
+      LOG.debug("open URL connection");
+      URLConnection connection = url.openConnection();
+      if (connection instanceof HttpURLConnection) {
+        connConfigurator.configure((HttpURLConnection) connection);
+      }
+      return connection;
+    }
+  }
+
+  /**
+   * Sets timeout parameters on the given URLConnection.
+   *
+   * @param connection
+   *          URLConnection to set
+   * @param connectTimeout
+   *          the connection and read timeout of the connection.
+   */
+  private static void setTimeouts(URLConnection connection,
+                                  int connectTimeout,
+                                  int readTimeout) {
+    connection.setConnectTimeout(connectTimeout);
+    connection.setReadTimeout(readTimeout);
+  }
+}

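A minimal sketch of going through URLConnectionFactory above for a plain (non-SPNEGO) request; the URL is a placeholder and "hasConfig" is assumed to be an already loaded HasConfig.

    // Illustrative only: build a factory from a HasConfig (it falls back to the
    // timeout-only configurator if SSL settings cannot be loaded) and open a
    // connection with the default timeouts applied.
    URLConnectionFactory factory = URLConnectionFactory.newDefaultURLConnectionFactory(hasConfig);
    URLConnection conn = factory.openConnection(new URL("https://server.example.com:8092/")); // placeholder URL
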
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-dist/bin/hadmin-local.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/hadmin-local.sh b/has/has-dist/bin/hadmin-local.sh
index 5a7eb3e..1acf3a0 100644
--- a/has/has-dist/bin/hadmin-local.sh
+++ b/has/has-dist/bin/hadmin-local.sh
@@ -16,7 +16,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-APP_MAIN=org.apache.hadoop.has.tool.server.hadmin.local.HadminLocalTool
+APP_MAIN=org.apache.kerby.has.tool.server.hadmin.local.HadminLocalTool
 
 # Reset HAS_CONF_DIR if CONF_DIR not null
 if [ X"$1" = X"-k" ]; then

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-dist/bin/hadmin-remote.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/hadmin-remote.sh b/has/has-dist/bin/hadmin-remote.sh
index 233c056..1e59294 100644
--- a/has/has-dist/bin/hadmin-remote.sh
+++ b/has/has-dist/bin/hadmin-remote.sh
@@ -17,7 +17,7 @@
 # limitations under the License.
 
 CONF_DIR=$1
-APP_MAIN=org.apache.hadoop.has.tool.client.hadmin.remote.HadminRemoteTool
+APP_MAIN=org.apache.kerby.has.tool.client.hadmin.remote.HadminRemoteTool
 
 # Reset HAS_CONF_DIR if CONF_DIR not null
 if [ "$CONF_DIR" != "" ]; then

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-dist/bin/kdcinit.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/kdcinit.sh b/has/has-dist/bin/kdcinit.sh
index f6e30c3..f3216e0 100644
--- a/has/has-dist/bin/kdcinit.sh
+++ b/has/has-dist/bin/kdcinit.sh
@@ -17,7 +17,7 @@
 # limitations under the License.
 
 CONF_DIR=$1
-APP_MAIN=org.apache.hadoop.has.tool.client.kdcinit.HasInitTool
+APP_MAIN=org.apache.kerby.has.tool.client.kdcinit.HasInitTool
 
 # Reset HAS_CONF_DIR if CONF_DIR not null
 if [ "$CONF_DIR" != "" ]; then

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-dist/bin/kinit.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/kinit.sh b/has/has-dist/bin/kinit.sh
index 3d605d6..97f33aa 100644
--- a/has/has-dist/bin/kinit.sh
+++ b/has/has-dist/bin/kinit.sh
@@ -16,7 +16,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-APP_MAIN=org.apache.hadoop.has.tool.client.kinit.KinitTool
+APP_MAIN=org.apache.kerby.has.tool.client.kinit.KinitTool
 
 # Get HAS_HOME directory
 bin=`dirname "$0"`

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-dist/bin/klist.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/klist.sh b/has/has-dist/bin/klist.sh
index 0643ae7..04103ec 100644
--- a/has/has-dist/bin/klist.sh
+++ b/has/has-dist/bin/klist.sh
@@ -16,7 +16,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-APP_MAIN=org.apache.hadoop.has.tool.client.klist.KlistTool
+APP_MAIN=org.apache.kerby.has.tool.client.klist.KlistTool
 
 # Get HAS_HOME directory
 bin=`dirname "$0"`

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-dist/bin/login-test.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/login-test.sh b/has/has-dist/bin/login-test.sh
index f26b1df..0699945 100644
--- a/has/has-dist/bin/login-test.sh
+++ b/has/has-dist/bin/login-test.sh
@@ -16,7 +16,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-APP_MAIN=org.apache.hadoop.has.tool.client.hclient.HasClientLoginTool
+APP_MAIN=org.apache.kerby.has.tool.client.hclient.HasClientLoginTool
 
 # Get HAS_HOME directory
 bin=`dirname "$0"`

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-dist/bin/start-has.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/start-has.sh b/has/has-dist/bin/start-has.sh
index 95a6913..cfb8d08 100644
--- a/has/has-dist/bin/start-has.sh
+++ b/has/has-dist/bin/start-has.sh
@@ -27,7 +27,7 @@ usage()
 CONF_DIR=$1
 WORK_DIR=$2
 pid=/tmp/has.pid # Pid file to save pid numbers
-APP_MAIN=org.apache.hadoop.has.server.HasServer
+APP_MAIN=org.apache.kerby.has.server.HasServer
 
 # Reset HAS_CONF_DIR and HAS_WORK_DIR if CONF_DIR or WORK_DIR not null
 if [ "$CONF_DIR" != "" ]; then

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-dist/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-dist/pom.xml b/has/has-dist/pom.xml
index 81eccc5..470dd73 100644
--- a/has/has-dist/pom.xml
+++ b/has/has-dist/pom.xml
@@ -5,7 +5,7 @@
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
-    <groupId>org.apache.hadoop</groupId>
+    <groupId>org.apache.kerby</groupId>
     <artifactId>has-project</artifactId>
     <version>1.0.0-SNAPSHOT</version>
   </parent>
@@ -16,32 +16,32 @@
 
   <dependencies>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.kerby</groupId>
       <artifactId>has-common</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.kerby</groupId>
       <artifactId>has-common</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.kerby</groupId>
       <artifactId>has-client</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.kerby</groupId>
       <artifactId>has-server</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.kerby</groupId>
       <artifactId>has-client-tool</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.kerby</groupId>
       <artifactId>has-server-tool</artifactId>
       <version>${project.version}</version>
     </dependency>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-plugins/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-plugins/pom.xml b/has/has-plugins/pom.xml
index d5b2195..950011b 100644
--- a/has/has-plugins/pom.xml
+++ b/has/has-plugins/pom.xml
@@ -4,7 +4,7 @@
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <parent>
         <artifactId>has-project</artifactId>
-        <groupId>org.apache.hadoop</groupId>
+        <groupId>org.apache.kerby</groupId>
         <version>1.0.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
@@ -13,7 +13,7 @@
 
     <dependencies>
         <dependency>
-            <groupId>org.apache.hadoop</groupId>
+            <groupId>org.apache.kerby</groupId>
             <artifactId>has-common</artifactId>
             <version>${project.version}</version>
         </dependency>
@@ -24,12 +24,12 @@
             <scope>test</scope>
         </dependency>
         <dependency>
-            <groupId>org.apache.hadoop</groupId>
+            <groupId>org.apache.kerby</groupId>
             <artifactId>has-client</artifactId>
             <version>${project.version}</version>
         </dependency>
         <dependency>
-            <groupId>org.apache.hadoop</groupId>
+            <groupId>org.apache.kerby</groupId>
             <artifactId>has-server</artifactId>
             <version>${project.version}</version>
         </dependency>


[07/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/AbstractHasServerPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/AbstractHasServerPlugin.java b/has/has-server/src/main/java/org/apache/kerby/has/server/AbstractHasServerPlugin.java
new file mode 100644
index 0000000..f9d0b33
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/AbstractHasServerPlugin.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server;
+
+import org.apache.kerby.kerberos.kerb.KrbRuntime;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class AbstractHasServerPlugin implements HasServerPlugin {
+
+    public static final Logger LOG = LoggerFactory.getLogger(AbstractHasServerPlugin.class);
+
+    protected abstract void doAuthenticate(AuthToken userToken, AuthToken authToken)
+        throws HasAuthenException;
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public AuthToken authenticate(AuthToken userToken) throws HasAuthenException {
+
+        AuthToken authToken = KrbRuntime.getTokenProvider("JWT").createTokenFactory().createToken();
+
+        doAuthenticate(userToken, authToken);
+
+        return authToken;
+    }
+
+}

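A minimal sketch of extending AbstractHasServerPlugin above; it shows only doAuthenticate and is left abstract so the remaining HasServerPlugin methods (not part of this diff hunk) stay unimplemented. The subject-copying logic is an assumption for illustration.

    // Illustrative only: pass the subject of the already-verified user token
    // through to the token that will be issued.
    public abstract class ExampleServerPlugin extends AbstractHasServerPlugin {
        @Override
        protected void doAuthenticate(AuthToken userToken, AuthToken authToken)
            throws HasAuthenException {
            if (userToken.getSubject() == null) {
                throw new HasAuthenException("User token has no subject.");
            }
            authToken.setSubject(userToken.getSubject());
            authToken.setIssuer(userToken.getIssuer());
        }
    }
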
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/HasAuthenException.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/HasAuthenException.java b/has/has-server/src/main/java/org/apache/kerby/has/server/HasAuthenException.java
new file mode 100644
index 0000000..57342b1
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/HasAuthenException.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server;
+
+import org.apache.kerby.has.common.HasException;
+
+public class HasAuthenException extends HasException {
+    private static final long serialVersionUID = 171016915395892939L;
+
+    public HasAuthenException(Throwable cause) {
+        super(cause);
+    }
+
+    public HasAuthenException(String message) {
+        super(message);
+    }
+
+    public HasAuthenException(String message, Throwable cause) {
+        super(message, cause);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/HasServer.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/HasServer.java b/has/has-server/src/main/java/org/apache/kerby/has/server/HasServer.java
new file mode 100644
index 0000000..46e1d10
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/HasServer.java
@@ -0,0 +1,701 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.server;
+
+import org.apache.commons.dbutils.DbUtils;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.util.HasUtil;
+import org.apache.kerby.has.server.web.WebConfigKey;
+import org.apache.kerby.has.server.web.WebServer;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.kerby.kerberos.kdc.impl.NettyKdcServerImpl;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadmin;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadminImpl;
+import org.apache.kerby.kerberos.kerb.client.ClientUtil;
+import org.apache.kerby.kerberos.kerb.client.KrbConfig;
+import org.apache.kerby.kerberos.kerb.client.KrbSetting;
+import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
+import org.apache.kerby.kerberos.kerb.identity.backend.IdentityBackend;
+import org.apache.kerby.kerberos.kerb.server.KdcServer;
+import org.apache.kerby.kerberos.kerb.server.KdcUtil;
+import org.apache.kerby.util.IOUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Pattern;
+
+/**
+ * The HAS KDC server implementation.
+ */
+public class HasServer {
+    public static final Logger LOG = LoggerFactory.getLogger(HasServer.class);
+
+    private static HasServer server = null;
+
+    private KrbSetting krbSetting;
+    private KdcServer kdcServer;
+    private WebServer webServer;
+    private File confDir;
+    private File workDir;
+    private String kdcHost;
+    private HasConfig hasConfig;
+
+    public HasServer(File confDir) throws KrbException {
+        this.confDir = confDir;
+    }
+
+    private void setConfDir(File confDir) {
+        this.confDir = confDir;
+    }
+
+    public File getConfDir() {
+        return confDir;
+    }
+
+    public File getWorkDir() {
+        return workDir;
+    }
+
+    public void setWorkDir(File workDir) {
+        this.workDir = workDir;
+    }
+
+    public void setKdcHost(String host) {
+        this.kdcHost = host;
+    }
+
+    public String getKdcHost() {
+        return kdcHost;
+    }
+
+    public KrbSetting getKrbSetting() {
+        return krbSetting;
+    }
+
+    public KdcServer getKdcServer() {
+        return kdcServer;
+    }
+
+    public WebServer getWebServer() {
+        return webServer;
+    }
+
+    public void setWebServer(WebServer webServer) {
+        this.webServer = webServer;
+    }
+
+    public void startKdcServer() throws HasException {
+        BackendConfig backendConfig;
+        try {
+            backendConfig = KdcUtil.getBackendConfig(getConfDir());
+        } catch (KrbException e) {
+            throw new HasException("Failed to get backend config. " + e);
+        }
+        String backendJar = backendConfig.getString("kdc_identity_backend");
+        if ("org.apache.kerby.has.server.kdc.MySQLIdentityBackend".equals(backendJar)) {
+            updateKdcConf();
+        }
+        try {
+            kdcServer = new KdcServer(confDir);
+        } catch (KrbException e) {
+            throw new HasException("Failed to create KdcServer. " + e);
+        }
+        kdcServer.setWorkDir(workDir);
+        kdcServer.setInnerKdcImpl(new NettyKdcServerImpl(kdcServer.getKdcSetting()));
+        try {
+            kdcServer.init();
+        } catch (KrbException e) {
+            LOG.error("Errors occurred when init has kdc server:  " + e.getMessage());
+            throw new HasException("Errors occurred when init has kdc server:  " + e.getMessage());
+        }
+
+        KrbConfig krbConfig = null;
+        try {
+            krbConfig = ClientUtil.getConfig(confDir);
+        } catch (KrbException e) {
+            LOG.warn("Errors occurred when getting the config from conf dir, "
+                + "falling back to a default KrbConfig. " + e.getMessage());
+        }
+        if (krbConfig == null) {
+            krbConfig = new KrbConfig();
+        }
+        this.krbSetting = new KrbSetting(krbConfig);
+        try {
+            kdcServer.start();
+        } catch (KrbException e) {
+            throw new HasException("Failed to start kdc server. " + e);
+        }
+        try {
+            HasUtil.setEnableConf(new File(confDir, "has-server.conf"), "false");
+        } catch (Exception e) {
+            throw new HasException("Failed to enable conf. " + e);
+        }
+        setHttpFilter();
+    }
+
+    private void setHttpFilter() throws HasException {
+        File httpKeytabFile = new File(workDir, "http.keytab");
+        LocalKadmin kadmin = new LocalKadminImpl(kdcServer.getKdcSetting(),
+            kdcServer.getIdentityService());
+        createHttpPrincipal(kadmin);
+        try {
+            kadmin.exportKeytab(httpKeytabFile, getHttpPrincipal());
+        } catch (KrbException e) {
+            throw new HasException("Failed to export keytab: " + e.getMessage());
+        }
+        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE,
+            hasConfig.getFilterAuthType());
+        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+            getHttpPrincipal());
+        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
+            httpKeytabFile.getPath());
+        webServer.defineFilter();
+    }
+
+    public File initKdcServer() throws KrbException {
+        File adminKeytabFile = new File(workDir, "admin.keytab");
+        LocalKadmin kadmin = new LocalKadminImpl(kdcServer.getKdcSetting(),
+            kdcServer.getIdentityService());
+        if (adminKeytabFile.exists()) {
+            throw new KrbException("KDC Server is already inited.");
+        }
+        kadmin.createBuiltinPrincipals();
+        kadmin.exportKeytab(adminKeytabFile, kadmin.getKadminPrincipal());
+        System.out.println("The keytab for kadmin principal "
+            + " has been exported to the specified file "
+            + adminKeytabFile.getAbsolutePath() + ", please safely keep it, "
+            + "in order to use kadmin tool later");
+
+        return adminKeytabFile;
+    }
+
+    public void createHttpPrincipal(LocalKadmin kadmin) throws HasException {
+        String httpPrincipal = getHttpPrincipal();
+        IdentityBackend backend = kdcServer.getIdentityService();
+        try {
+            if (backend.getIdentity(httpPrincipal) == null) {
+                kadmin.addPrincipal(httpPrincipal);
+            } else {
+                LOG.info("The http principal already exists in backend.");
+            }
+        } catch (KrbException e) {
+            throw new HasException("Failed to add princial, " + e.getMessage());
+        }
+    }
+
+    public String getHttpPrincipal() throws HasException {
+        String realm = kdcServer.getKdcSetting().getKdcRealm();
+        String nameString;
+        try {
+            InetAddress addr = InetAddress.getLocalHost();
+            String fqName = addr.getCanonicalHostName();
+            nameString = "HTTP/" + fqName + "@" + realm;
+        } catch (UnknownHostException e) {
+            throw new HasException(e);
+        }
+        LOG.info("The http principal name is: " + nameString);
+        return nameString;
+    }
+
+    /**
+     * Update a conf file.
+     *
+     * @param confName  conf file name
+     * @param values    customized values
+     * @throws IOException if the conf file or its template cannot be read or written
+     * @throws HasException if the conf file does not exist or cannot be deleted
+     */
+    public void updateConfFile(String confName, Map<String, String> values)
+        throws IOException, HasException {
+        File confFile = new File(getConfDir().getAbsolutePath(), confName);
+        if (confFile.exists()) {
+            // Update conf file content
+            InputStream templateResource;
+            if (confName.equals("has-server.conf")) {
+                templateResource = new FileInputStream(confFile);
+            } else {
+                String resourcePath = "/" + confName + ".template";
+                templateResource = getClass().getResourceAsStream(resourcePath);
+            }
+            String content = IOUtil.readInput(templateResource);
+            for (Map.Entry<String, String> entry : values.entrySet()) {
+                content = content.replaceAll(Pattern.quote(entry.getKey()), entry.getValue());
+            }
+
+            // Delete the original conf file
+            boolean delete = confFile.delete();
+            if (!delete) {
+                throw new HasException("Failed to delete conf file: " + confName);
+            }
+
+            // Save the updated conf file
+            IOUtil.writeFile(content, confFile);
+        } else {
+            throw new HasException("Conf file: " + confName + " not found.");
+        }
+    }
+
+    /**
+     * Get the KDC config from MySQL.
+     *
+     * @return KDC config
+     * @throws HasException if the KDC config cannot be read from MySQL
+     */
+    private Map<String, String> getKdcConf() throws HasException {
+        PreparedStatement preStm = null;
+        ResultSet result = null;
+        Map<String, String> kdcConf = new HashMap<>();
+        BackendConfig backendConfig;
+        try {
+            backendConfig = KdcUtil.getBackendConfig(getConfDir());
+        } catch (KrbException e) {
+            throw new HasException("Getting backend config failed." + e.getMessage());
+        }
+        String driver = backendConfig.getString("mysql_driver");
+        String url = backendConfig.getString("mysql_url");
+        String user = backendConfig.getString("mysql_user");
+        String password = backendConfig.getString("mysql_password");
+        Connection connection = startConnection(driver, url, user, password);
+        try {
+
+            // Get Kdc configuration from kdc_config table
+            String stmKdc = "SELECT * FROM `kdc_config` WHERE id = 1";
+            preStm = connection.prepareStatement(stmKdc);
+            result = preStm.executeQuery();
+            while (result.next()) {
+                String realm = result.getString("realm");
+                String servers = result.getString("servers");
+                String port = String.valueOf(result.getInt("port"));
+                kdcConf.put("servers", servers);
+                kdcConf.put("_PORT_", port);
+                kdcConf.put("_REALM_", realm);
+            }
+
+        } catch (SQLException e) {
+            LOG.error("Error occurred while getting kdc config.");
+            throw new HasException("Failed to get kdc config. ", e);
+        } finally {
+            DbUtils.closeQuietly(preStm);
+            DbUtils.closeQuietly(result);
+            DbUtils.closeQuietly(connection);
+        }
+
+        return kdcConf;
+    }
+
+    /**
+     * Update the KDC conf file.
+     *
+     * @throws HasException if the KDC config cannot be read or the conf file cannot be updated
+     */
+    private void updateKdcConf() throws HasException {
+        try {
+            Map<String, String> values = getKdcConf();
+            String host = getKdcHost();
+            if (host == null) {
+                host = getWebServer().getBindAddress().getHostName();
+            }
+            values.remove("servers");
+            values.put("_HOST_", host);
+            updateConfFile("kdc.conf", values);
+        } catch (IOException e) {
+            throw new HasException("Failed to update kdc config. ", e);
+        }
+    }
+
+    /**
+     * Start the MySQL connection.
+     *
+     * @param driver JDBC driver class name of the connection
+     * @param url url of connection
+     * @param user username of connection
+     * @param password password of connection
+     * @return MySQL JDBC connection
+     * @throws HasException if the driver class is not found or the connection fails
+     */
+    private Connection startConnection(String driver, String url, String user,
+                                       String password) throws HasException {
+        Connection connection;
+        try {
+            Class.forName(driver);
+            connection = DriverManager.getConnection(url, user, password);
+            if (!connection.isClosed()) {
+                LOG.info("Succeeded in connecting to MySQL.");
+            }
+        } catch (ClassNotFoundException e) {
+            throw new HasException("JDBC Driver Class not found. ", e);
+        } catch (SQLException e) {
+            throw new HasException("Failed to connecting to MySQL. ", e);
+        }
+
+        return connection;
+    }
+
+    /**
+     * Configure the HAS server KDC which has a MySQL backend.
+     * @param backendConfig MySQL backend config
+     * @param realm KDC realm to set
+     * @param port KDC port to set
+     * @param host KDC host to set
+     * @param hasServer HAS server to update
+     * @throws HasException e
+     */
+    public void configMySQLKdc(BackendConfig backendConfig, String realm, int port,
+                               String host, HasServer hasServer) throws HasException {
+
+        // Start mysql connection
+        String driver = backendConfig.getString("mysql_driver");
+        String url = backendConfig.getString("mysql_url");
+        String user = backendConfig.getString("mysql_user");
+        String password = backendConfig.getString("mysql_password");
+        Connection connection = startConnection(driver, url, user, password);
+
+        ResultSet resConfig = null;
+        PreparedStatement preStm = null;
+        try {
+            createKdcTable(connection); // Create kdc_config table if not exists
+            String stm = "SELECT * FROM `kdc_config` WHERE id = 1";
+            preStm = connection.prepareStatement(stm);
+            resConfig = preStm.executeQuery();
+            if (!resConfig.next()) {
+                addKdcConfig(connection, realm, port, host);
+            } else {
+                String oldHost = hasServer.getKdcHost();
+                String servers = resConfig.getString("servers");
+                String[] serverArray = servers.split(",");
+                List<String> serverList = new ArrayList<>();
+                Collections.addAll(serverList, serverArray);
+                if (serverList.contains(oldHost)) {
+                    servers = servers.replaceAll(Pattern.quote(oldHost), host);
+                } else {
+                    servers = servers + "," + host;
+                }
+                boolean initialized = resConfig.getBoolean("initialized");
+                updateKdcConfig(connection, initialized, port, realm, servers);
+            }
+            hasServer.setKdcHost(host);
+        } catch (SQLException e) {
+            throw new HasException("Failed to config HAS KDC. ", e);
+        } finally {
+            DbUtils.closeQuietly(preStm);
+            DbUtils.closeQuietly(resConfig);
+            DbUtils.closeQuietly(connection);
+        }
+    }
+
+    /**
+     * Create the kdc_config table in the database.
+     * @param conn database connection
+     * @throws HasException e
+     */
+    private void createKdcTable(final Connection conn) throws HasException {
+        PreparedStatement preStm = null;
+        try {
+            String stm = "CREATE TABLE IF NOT EXISTS `kdc_config` ("
+                + "port INTEGER DEFAULT 88, servers VARCHAR(255) NOT NULL, "
+                + "initialized bool DEFAULT FALSE, realm VARCHAR(255) "
+                + "DEFAULT NULL, id INTEGER DEFAULT 1, CHECK (id=1), PRIMARY KEY (id)) "
+                + "ENGINE=INNODB;";
+            preStm = conn.prepareStatement(stm);
+            preStm.executeUpdate();
+        } catch (SQLException e) {
+            throw new HasException("Failed to create kdc_config table. ", e);
+        } finally {
+            DbUtils.closeQuietly(preStm);
+        }
+    }
+
+    /**
+     * Add KDC config information to the database.
+     * @param conn database connection
+     * @param realm realm to add
+     * @param port port to add
+     * @param host host to add
+     * @throws HasException e
+     */
+    private void addKdcConfig(Connection conn, String realm, int port, String host)
+        throws HasException {
+        PreparedStatement preStm = null;
+        try {
+            String stm = "INSERT INTO `kdc_config` (port, servers, realm)" + " VALUES(?, ?, ?)";
+            preStm = conn.prepareStatement(stm);
+            preStm.setInt(1, port);
+            preStm.setString(2, host);
+            preStm.setString(3, realm);
+            preStm.executeUpdate();
+        } catch (SQLException e) {
+            throw new HasException("Failed to insert into kdc_config table. ", e);
+        } finally {
+            DbUtils.closeQuietly(preStm);
+        }
+    }
+
+    /**
+     * Update the KDC config record in the database.
+     * @param conn database connection
+     * @param initialized whether the KDC config has already been initialized
+     * @param port port to update
+     * @param realm realm to update
+     * @param servers servers to update
+     * @throws HasException e
+     */
+    private void updateKdcConfig(Connection conn, boolean initialized, int port,
+                                 String realm, String servers) throws HasException {
+        PreparedStatement preStm = null;
+        try {
+            if (initialized) {
+                String stmUpdate = "UPDATE `kdc_config` SET servers = ? WHERE id = 1";
+                preStm = conn.prepareStatement(stmUpdate);
+                preStm.setString(1, servers);
+                preStm.executeUpdate();
+            } else {
+                String stmUpdate = "UPDATE `kdc_config` SET port = ?, realm = ?, servers = ? WHERE id = 1";
+                preStm = conn.prepareStatement(stmUpdate);
+                preStm.setInt(1, port);
+                preStm.setString(2, realm);
+                preStm.setString(3, servers);
+                preStm.executeUpdate();
+            }
+        } catch (SQLException e) {
+            throw new HasException("Failed to update KDC Config. ", e);
+        } finally {
+            DbUtils.closeQuietly(preStm);
+        }
+    }
+
+    /**
+     * Read in krb5.conf.template and substitute the realm, port and KDC servers.
+     *
+     * @return krb5 conf file
+     * @throws HasException e
+     */
+    public File generateKrb5Conf() throws HasException {
+        Map<String, String> kdcConf = getKdcConf();
+        String[] servers = kdcConf.get("servers").split(",");
+        int kdcPort = Integer.parseInt(kdcConf.get("_PORT_"));
+        String kdcRealm = kdcConf.get("_REALM_");
+        StringBuilder kdcBuilder = new StringBuilder();
+        for (String server : servers) {
+            String append = "\t\tkdc = " + server.trim() + ":" + kdcPort + "\n";
+            kdcBuilder.append(append);
+        }
+        String kdc = kdcBuilder.toString();
+        kdc = kdc.substring(0, kdc.length() - 1);
+        String resourcePath = "/krb5.conf.template";
+        InputStream templateResource = getClass().getResourceAsStream(resourcePath);
+        String content = null;
+        try {
+            content = IOUtil.readInput(templateResource);
+        } catch (IOException e) {
+            throw new HasException("Read template resource failed. " + e);
+        }
+        content = content.replaceAll("_REALM_", kdcRealm);
+        content = content.replaceAll("_PORT_", String.valueOf(kdcPort));
+        content = content.replaceAll("_UDP_LIMIT_", "4096");
+        content = content.replaceAll("_KDCS_", kdc);
+        File confFile = new File(confDir, "krb5.conf");
+        if (confFile.exists()) {
+            boolean delete = confFile.delete();
+            if (!delete) {
+                throw new HasException("File delete error!");
+            }
+        }
+        try {
+            IOUtil.writeFile(content, confFile);
+        } catch (IOException e) {
+            throw new HasException("Write content to conf file failed. " + e);
+        }
+
+        return confFile;
+    }
+
+    /**
+     * Read in has-server.conf and create has-client.conf.
+     *
+     * @return has conf file
+     * @throws IOException e
+     * @throws HasException e
+     */
+    public File generateHasConf() throws HasException, IOException {
+        Map<String, String> kdcConf = getKdcConf();
+        String servers = kdcConf.get("servers");
+        File confFile = new File(getConfDir().getAbsolutePath(), "has-server.conf");
+        HasConfig hasConfig = HasUtil.getHasConfig(confFile);
+        if (hasConfig != null) {
+            String defaultValue = hasConfig.getHttpsHost();
+            InputStream templateResource = new FileInputStream(confFile);
+            String content = IOUtil.readInput(templateResource);
+            content = content.replaceFirst(Pattern.quote(defaultValue), servers);
+            File hasFile = new File(confDir, "has-client.conf");
+            IOUtil.writeFile(content, hasFile);
+            return hasFile;
+        } else {
+            throw new HasException("has-server.conf not found. ");
+        }
+    }
+
+    public void stopKdcServer() {
+        try {
+            kdcServer.stop();
+        } catch (KrbException e) {
+            LOG.error("Fail to stop has kdc server");
+        }
+    }
+
+    public void startWebServer() throws HasException {
+        if (webServer == null) {
+            HasConfig conf = new HasConfig();
+
+            // Parse has-server.conf to get http_host and http_port
+            File confFile = new File(confDir, "has-server.conf");
+            hasConfig = HasUtil.getHasConfig(confFile);
+            if (hasConfig != null) {
+                try {
+                    String httpHost;
+                    String httpPort;
+                    String httpsHost;
+                    String httpsPort;
+                    if (hasConfig.getHttpHost() != null) {
+                        httpHost = hasConfig.getHttpHost();
+                    } else {
+                        LOG.info("Cannot get the http_host from has-server.conf, using the default http host.");
+                        httpHost = WebConfigKey.HAS_HTTP_HOST_DEFAULT;
+                    }
+                    if (hasConfig.getHttpPort() != null) {
+                        httpPort = hasConfig.getHttpPort();
+                    } else {
+                        LOG.info("Cannot get the http_port from has-server.conf, using the default http port.");
+                        httpPort = String.valueOf(WebConfigKey.HAS_HTTP_PORT_DEFAULT);
+                    }
+                    if (hasConfig.getHttpsHost() != null) {
+                        httpsHost = hasConfig.getHttpsHost();
+                    } else {
+                        LOG.info("Cannot get the https_host from has-server.conf, using the default https host.");
+                        httpsHost = WebConfigKey.HAS_HTTPS_HOST_DEFAULT;
+                    }
+                    if (hasConfig.getHttpsPort() != null) {
+                        httpsPort = hasConfig.getHttpsPort();
+                    } else {
+                        LOG.info("Cannot get the https_port from has-server.conf , using the default https port.");
+                        httpsPort = String.valueOf(WebConfigKey.HAS_HTTPS_PORT_DEFAULT);
+                    }
+                    String hasHttpAddress = httpHost + ":" + httpPort;
+                    String hasHttpsAddress = httpsHost + ":" + httpsPort;
+                    LOG.info("The web server http address: " + hasHttpAddress);
+                    LOG.info("The web server https address: " + hasHttpsAddress);
+
+                    conf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY, hasHttpAddress);
+                    conf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY, hasHttpsAddress);
+                    conf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY,
+                        HttpConfig.Policy.HTTP_AND_HTTPS.name());
+                    conf.setString(WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+                        hasConfig.getSslServerConf());
+                    webServer = new WebServer(conf);
+                } catch (NumberFormatException e) {
+                    throw new IllegalArgumentException("https_port should be a number. "
+                        + e.getMessage());
+                }
+            } else {
+                throw new HasException("has-server.conf not found in " + confDir + ". ");
+            }
+        } else {
+            hasConfig = webServer.getConf();
+        }
+        webServer.start();
+        webServer.defineConfFilter();
+        try {
+            HasUtil.setEnableConf(new File(confDir, "has-server.conf"), "true");
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when enable conf. " + e.getMessage());
+        }
+        webServer.setWebServerAttribute(this);
+    }
+
+    public void stopWebServer() {
+        if (webServer != null) {
+            try {
+                webServer.stop();
+            } catch (Exception e) {
+                LOG.error("Failed to stop http server. " + e.getMessage());
+            }
+        }
+    }
+
+    public static void main(String[] args) {
+        if (args.length < 1) {
+            System.exit(2);
+        }
+        if (args[0].equals("-start")) {
+            String confDirPath = args[1];
+            String workDirPath = args[2];
+            File confDir = new File(confDirPath);
+            File workDir = new File(workDirPath);
+            if (!confDir.exists() || !workDir.exists()) {
+                LOG.error("Invalid or not exist conf-dir or work-dir");
+                System.exit(3);
+            }
+            try {
+                server = new HasServer(confDir);
+            } catch (KrbException e) {
+                LOG.error("Errors occurred when create kdc server:  " + e.getMessage());
+                System.exit(4);
+            }
+            server.setConfDir(confDir);
+            server.setWorkDir(workDir);
+            // Only start the web server; the KDC server can start after the realm is set.
+            try {
+                server.startWebServer();
+            } catch (HasException e) {
+                LOG.error("Errors occurred when start has http server:  " + e.getMessage());
+                System.exit(6);
+            }
+
+            if (server.getWebServer().getHttpAddress() != null) {
+                LOG.info("HAS http server started.");
+                LOG.info("host: " + server.getWebServer().getHttpAddress().getHostName());
+                LOG.info("port: " + server.getWebServer().getHttpAddress().getPort());
+            }
+            if (server.getWebServer().getHttpsAddress() != null) {
+                LOG.info("HAS https server started.");
+                LOG.info("host: " + server.getWebServer().getHttpsAddress().getHostName());
+                LOG.info("port: " + server.getWebServer().getHttpsAddress().getPort());
+            }
+        } else if (args[0].equals("-stop")) {
+            if (server != null) {
+                server.stopWebServer();
+                server.stopKdcServer();
+            }
+        } else {
+            System.exit(2);
+        }
+    }
+}
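
The conf-file handling above comes down to literal placeholder substitution over a template: values read from the kdc_config table are keyed by tokens such as _REALM_, _PORT_ and _HOST_, and swapped into the file with Pattern.quote so the tokens are matched literally. A minimal, self-contained sketch of that step follows; the template text and values are made up for illustration and are not the real krb5.conf.template.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Pattern;

    public class TemplateSubstitutionSketch {
        public static void main(String[] args) {
            // Made-up template text; the actual krb5.conf.template is not part of this patch.
            String template = "[libdefaults]\n"
                + "  default_realm = _REALM_\n"
                + "[realms]\n"
                + "  _REALM_ = {\n"
                + "    kdc = localhost:_PORT_\n"
                + "  }\n";

            Map<String, String> values = new HashMap<>();
            values.put("_REALM_", "EXAMPLE.COM");
            values.put("_PORT_", "88");

            String content = template;
            for (Map.Entry<String, String> entry : values.entrySet()) {
                // Pattern.quote makes the placeholder a literal match, as updateConfFile does.
                content = content.replaceAll(Pattern.quote(entry.getKey()), entry.getValue());
            }
            System.out.println(content);
        }
    }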

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/HasServerPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/HasServerPlugin.java b/has/has-server/src/main/java/org/apache/kerby/has/server/HasServerPlugin.java
new file mode 100644
index 0000000..466e5ff
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/HasServerPlugin.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server;
+
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+
+public interface HasServerPlugin {
+    /**
+     * Get the login module type ID, used to distinguish this module from others.
+     * Should correspond to the client side module.
+     *
+     * @return login type
+     */
+    String getLoginType();
+
+    /**
+     * Perform the server side authentication logic; the result, wrapped in an
+     * AuthToken, will be used to exchange a Kerberos ticket.
+     *
+     * @param userToken user token
+     * @return auth token
+     * @throws HasAuthenException e
+     */
+    AuthToken authenticate(AuthToken userToken) throws HasAuthenException;
+}
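
A server-side plugin only needs to report its login type and turn the incoming user token into an AuthToken for the ticket exchange. The sketch below is hypothetical (the "Echo" login type is invented) and simply trusts the client token; a real plugin would verify it against its own authentication backend.

    package org.apache.kerby.has.server;

    import org.apache.kerby.kerberos.kerb.type.base.AuthToken;

    // Hypothetical plugin that accepts every token unchanged.
    public class EchoHasServerPlugin implements HasServerPlugin {

        @Override
        public String getLoginType() {
            // Must match the login type reported by the corresponding client-side plugin.
            return "Echo";
        }

        @Override
        public AuthToken authenticate(AuthToken userToken) throws HasAuthenException {
            // A real plugin would validate the token here and throw HasAuthenException
            // on failure; this sketch simply returns it.
            return userToken;
        }
    }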

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/HasServerPluginRegistry.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/HasServerPluginRegistry.java b/has/has-server/src/main/java/org/apache/kerby/has/server/HasServerPluginRegistry.java
new file mode 100644
index 0000000..d75b714
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/HasServerPluginRegistry.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server;
+
+import org.apache.kerby.has.common.HasException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.ServiceLoader;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class HasServerPluginRegistry {
+    static final Logger LOG = LoggerFactory.getLogger(HasServerPluginRegistry.class);
+
+    private static Map<String, Class> allPlugins = new ConcurrentHashMap<>();
+
+    static {
+        ServiceLoader<HasServerPlugin> plugins = ServiceLoader.load(HasServerPlugin.class);
+
+        for (HasServerPlugin plugin : plugins) {
+            allPlugins.put(plugin.getLoginType(), plugin.getClass());
+        }
+    }
+
+    public static Set<String> registeredPlugins() {
+        return Collections.unmodifiableSet(allPlugins.keySet());
+    }
+
+    public static boolean registeredPlugin(String name) {
+        return allPlugins.containsKey(name);
+    }
+
+    public static HasServerPlugin createPlugin(String name) throws HasException {
+        if (!registeredPlugin(name)) {
+            throw new HasException("Unregistered plugin " + name);
+        }
+        try {
+            HasServerPlugin serverPlugin = (HasServerPlugin) allPlugins.get(name).newInstance();
+            return serverPlugin;
+        } catch (Exception e) {
+            LOG.error("Create {} plugin failed", name, e);
+            throw new HasException(e.getMessage());
+        }
+    }
+}
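
Because the registry discovers implementations through java.util.ServiceLoader, a plugin jar advertises its class in META-INF/services/org.apache.kerby.has.server.HasServerPlugin and is then looked up by login type. A short usage sketch, assuming a hypothetical "Echo" plugin is on the classpath:

    import org.apache.kerby.has.common.HasException;
    import org.apache.kerby.has.server.HasServerPlugin;
    import org.apache.kerby.has.server.HasServerPluginRegistry;

    public class PluginLookupSketch {
        public static void main(String[] args) throws HasException {
            // List the login types of all plugins found on the classpath.
            System.out.println("Registered plugins: " + HasServerPluginRegistry.registeredPlugins());

            // "Echo" is a made-up login type; createPlugin throws HasException
            // if no plugin with that name was registered.
            if (HasServerPluginRegistry.registeredPlugin("Echo")) {
                HasServerPlugin plugin = HasServerPluginRegistry.createPlugin("Echo");
                System.out.println("Created plugin of type: " + plugin.getLoginType());
            }
        }
    }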

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/admin/LocalHasAdmin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/admin/LocalHasAdmin.java b/has/has-server/src/main/java/org/apache/kerby/has/server/admin/LocalHasAdmin.java
new file mode 100644
index 0000000..50644c3
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/admin/LocalHasAdmin.java
@@ -0,0 +1,382 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.server.admin;
+
+import org.apache.kerby.has.common.HasAdmin;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.util.HasUtil;
+import org.apache.kerby.has.server.HasServer;
+import org.apache.kerby.has.server.web.HostRoleType;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadmin;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadminImpl;
+import org.apache.kerby.kerberos.kerb.common.KrbUtil;
+import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
+import org.apache.kerby.kerberos.kerb.request.KrbIdentity;
+import org.apache.kerby.kerberos.kerb.server.KdcConfig;
+import org.apache.kerby.kerberos.kerb.server.KdcSetting;
+import org.apache.kerby.kerberos.kerb.server.KdcUtil;
+import org.apache.kerby.kerberos.kerb.server.ServerSetting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+
+public class LocalHasAdmin implements HasAdmin {
+    public static final Logger LOG = LoggerFactory.getLogger(LocalHasAdmin.class);
+
+    private final ServerSetting serverSetting;
+    private File confDir;
+
+    public LocalHasAdmin(HasServer hasServer) throws KrbException {
+        if (hasServer.getKdcServer() == null) {
+            throw new RuntimeException("Could not get HAS KDC server, please start KDC first.");
+        }
+        this.serverSetting = hasServer.getKdcServer().getKdcSetting();
+    }
+
+    /**
+     * Construct with prepared conf dir.
+     *
+     * @param confDir The path of conf dir
+     * @throws KrbException e
+     */
+    public LocalHasAdmin(File confDir) throws KrbException {
+        this.confDir = confDir;
+        KdcConfig tmpKdcConfig = KdcUtil.getKdcConfig(confDir);
+        if (tmpKdcConfig == null) {
+            tmpKdcConfig = new KdcConfig();
+        }
+
+        BackendConfig tmpBackendConfig = KdcUtil.getBackendConfig(confDir);
+        if (tmpBackendConfig == null) {
+            tmpBackendConfig = new BackendConfig();
+        }
+
+        this.serverSetting = new KdcSetting(tmpKdcConfig, tmpBackendConfig);
+    }
+
+    @Override
+    public List<String> getPrincipals(String exp) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        List<String> princs = null;
+        LOG.info("The value of exp is : " + exp);
+        if (exp == null || exp.equals("")) {
+            try {
+                princs = kadmin.getPrincipals();
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        } else {
+            try {
+                princs = kadmin.getPrincipals(exp);
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        }
+        return princs;
+    }
+
+    @Override
+    public void addPrincipal(String principal, String password) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        if (principal == null) {
+            throw new HasException("Value of principal is null.");
+        }
+        if (password == null || password.equals("")) {
+            try {
+                kadmin.addPrincipal(principal);
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        } else {
+            try {
+                kadmin.addPrincipal(principal, password);
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        }
+        LOG.info("Success to add principal :" + principal);
+    }
+
+    @Override
+    public void deletePrincipal(String principal) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        if (principal == null) {
+            throw new IllegalArgumentException("Value of principal is null.");
+        }
+        try {
+            kadmin.deletePrincipal(principal);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        LOG.info("Success to delete principal :" + principal);
+    }
+
+    @Override
+    public void renamePrincipal(String oldPrincipal, String newPrincipal) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            kadmin.renamePrincipal(oldPrincipal, newPrincipal);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        LOG.info("Success to rename principal : \"" + oldPrincipal
+                + "\" to \"" + newPrincipal + "\".");
+    }
+
+    @Override
+    public String addPrincByRole(String host, String role) throws HasException {
+        String result = "";
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        String realm = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
+        String[] princs = HostRoleType.valueOf(role).getPrincs();
+        if (princs == null) {
+            LOG.error("Cannot find the role: " + role);
+            return "Cannot find the role: " + role;
+        }
+        for (String princ : princs) {
+            try {
+                kadmin.addPrincipal(princ + realm);
+                LOG.info("Succeeded in adding principal: " + princ + realm);
+                result = result + "Succeeded in adding principal: " + princ + realm + "\n";
+            } catch (KrbException e) {
+                LOG.error(e.getMessage());
+                result = result + e.getMessage() + "\n";
+            }
+        }
+        return result;
+    }
+
+    @Override
+    public File getKeytabByHostAndRole(String host, String role) throws HasException {
+        LocalKadmin kadmin;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        String realm = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
+        File path = new File("/tmp/" + System.currentTimeMillis());
+        path.mkdirs();
+        File keytab = new File(path, role + "-" + host + ".keytab");
+        if (keytab.exists()) {
+            keytab.delete();
+        }
+        String[] princs = HostRoleType.valueOf(role).getPrincs();
+        for (String princ : princs) {
+            try {
+                if (kadmin.getPrincipal(princ + realm) == null) {
+                    continue;
+                }
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+            try {
+                kadmin.exportKeytab(keytab, princ + realm);
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        }
+        return keytab;
+    }
+
+    public void getKeytabByHostAndRole(String host, String role, File keytab) throws HasException {
+        LocalKadmin kadmin;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        String realm = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
+        if (keytab.exists()) {
+            keytab.delete();
+        }
+        String[] princs = HostRoleType.valueOf(role).getPrincs();
+        for (String princ : princs) {
+            try {
+                if (kadmin.getPrincipal(princ + realm) == null) {
+                    continue;
+                }
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+            try {
+                kadmin.exportKeytab(keytab, princ + realm);
+                System.out.println("Success to export keytab : " + keytab.getAbsolutePath());
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        }
+    }
+
+    @Override
+    public List<String> getPrincipals() throws HasException {
+        LocalKadmin kadmin;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            return kadmin.getPrincipals();
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+    }
+
+    public KrbIdentity getPrincipal(String principalName) throws HasException {
+        LocalKadmin kadmin;
+        KrbIdentity identity;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            identity = kadmin.getPrincipal(principalName);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        return identity;
+    }
+
+    @Override
+    public void addPrincipal(String principal) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        if (principal == null) {
+            throw new HasException("Value of principal is null.");
+        }
+        try {
+            kadmin.addPrincipal(principal);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        LOG.info("Success to add principal :" + principal);
+    }
+
+    @Override
+    public String getHadminPrincipal() {
+        return KrbUtil.makeKadminPrincipal(serverSetting.getKdcRealm()).getName();
+    }
+
+    /**
+     * Get the number of principals.
+     */
+    @Override
+    public int size() throws HasException {
+        return this.getPrincipals().size();
+    }
+
+    @Override
+    public void setEnableOfConf(String isEnable) throws HasException {
+        File hasConf = new File(confDir, "has-server.conf");
+        if (!hasConf.exists()) {
+            System.err.println("has-server.conf is not exists.");
+            return;
+        }
+        try {
+            HasUtil.setEnableConf(hasConf, isEnable);
+        } catch (IOException e) {
+            System.err.println(e.getMessage());
+            return;
+        }
+    }
+
+    @Override
+    public void exportKeytab(File keytabFile, String principal)
+        throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            kadmin.exportKeytab(keytabFile, principal);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+    }
+
+    @Override
+    public void exportKeytab(File keytabFile, List<String> principals)
+            throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            kadmin.exportKeytab(keytabFile, principals);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+    }
+
+    public void getHostRoles() {
+        for (HostRoleType role : HostRoleType.values()) {
+            System.out.print("\tHostRole: " + role.getName()
+                    + ", PrincipalNames: ");
+            String[] princs = role.getPrincs();
+            for (int j = 0; j < princs.length; j++) {
+                System.out.print(princs[j]);
+                if (j == princs.length - 1) {
+                    System.out.println();
+                } else {
+                    System.out.print(", ");
+                }
+            }
+        }
+    }
+}
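
A hedged sketch of driving LocalHasAdmin directly from a prepared conf dir; the paths and principal names below are examples only, not values required by HAS.

    import java.io.File;

    import org.apache.kerby.has.common.HasException;
    import org.apache.kerby.has.server.admin.LocalHasAdmin;
    import org.apache.kerby.kerberos.kerb.KrbException;

    public class LocalHasAdminSketch {
        public static void main(String[] args) throws KrbException, HasException {
            // Example conf dir; it must contain the KDC and backend config files.
            LocalHasAdmin hadmin = new LocalHasAdmin(new File("/etc/has"));

            // Add a principal with a password, then export its keys to a keytab.
            hadmin.addPrincipal("alice@EXAMPLE.COM", "alice-password");
            hadmin.exportKeytab(new File("/tmp/alice.keytab"), "alice@EXAMPLE.COM");

            System.out.println("Number of principals: " + hadmin.size());
        }
    }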

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/kdc/HasKdcHandler.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/kdc/HasKdcHandler.java b/has/has-server/src/main/java/org/apache/kerby/has/server/kdc/HasKdcHandler.java
new file mode 100644
index 0000000..ce60739
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/kdc/HasKdcHandler.java
@@ -0,0 +1,315 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.server.kdc;
+
+import org.apache.kerby.has.common.util.HasUtil;
+import org.apache.kerby.has.server.HasServer;
+import org.apache.kerby.kerberos.kerb.KrbCodec;
+import org.apache.kerby.kerberos.kerb.KrbErrorCode;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.client.KrbContext;
+import org.apache.kerby.kerberos.kerb.common.EncryptionUtil;
+import org.apache.kerby.kerberos.kerb.common.KrbUtil;
+import org.apache.kerby.kerberos.kerb.server.KdcConfigKey;
+import org.apache.kerby.kerberos.kerb.server.KdcContext;
+import org.apache.kerby.kerberos.kerb.server.KdcRecoverableException;
+import org.apache.kerby.kerberos.kerb.server.KdcServer;
+import org.apache.kerby.kerberos.kerb.server.preauth.PreauthHandler;
+import org.apache.kerby.kerberos.kerb.server.request.AsRequest;
+import org.apache.kerby.kerberos.kerb.server.request.KdcRequest;
+import org.apache.kerby.kerberos.kerb.type.KerberosTime;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+import org.apache.kerby.kerberos.kerb.type.base.HostAddress;
+import org.apache.kerby.kerberos.kerb.type.base.HostAddresses;
+import org.apache.kerby.kerberos.kerb.type.base.KrbError;
+import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;
+import org.apache.kerby.kerberos.kerb.type.base.KrbToken;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
+import org.apache.kerby.kerberos.kerb.type.base.TokenFormat;
+import org.apache.kerby.kerberos.kerb.type.kdc.AsReq;
+import org.apache.kerby.kerberos.kerb.type.kdc.KdcOption;
+import org.apache.kerby.kerberos.kerb.type.kdc.KdcOptions;
+import org.apache.kerby.kerberos.kerb.type.kdc.KdcReqBody;
+import org.apache.kerby.kerberos.kerb.type.pa.PaData;
+import org.apache.kerby.kerberos.kerb.type.pa.PaDataEntry;
+import org.apache.kerby.kerberos.kerb.type.pa.PaDataType;
+import org.apache.kerby.kerberos.kerb.type.pa.token.PaTokenRequest;
+import org.apache.kerby.kerberos.kerb.type.pa.token.TokenInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class HasKdcHandler {
+    private static final Logger LOG = LoggerFactory.getLogger(HasKdcHandler.class);
+
+    private KdcContext kdcContext;
+    private KrbContext krbContext;
+    private KdcServer kdcServer;
+
+    /**
+     * Constructor with has server.
+     *
+     * @param hasServer has server
+     */
+    public HasKdcHandler(HasServer hasServer) {
+        this.krbContext = new KrbContext();
+        this.krbContext.init(hasServer.getKrbSetting());
+        this.kdcServer = hasServer.getKdcServer();
+        prepareHandler(kdcServer);
+    }
+
+    public KrbContext getKrbContext() {
+        return krbContext;
+    }
+
+    public KdcContext getKdcContext() {
+        return kdcContext;
+    }
+
+    private KdcServer getKdcServer() {
+        return kdcServer;
+    }
+
+    private void prepareHandler(KdcServer kdcServer) {
+        this.kdcContext = new KdcContext(kdcServer.getKdcSetting());
+        this.kdcContext.setIdentityService(kdcServer.getIdentityService());
+        PreauthHandler preauthHandler = new PreauthHandler();
+        preauthHandler.init();
+        this.kdcContext.setPreauthHandler(preauthHandler);
+    }
+
+    private String getAudience(String name) {
+        return name + "/" + getKdcContext().getKdcRealm() + "@" + getKdcContext().getKdcRealm();
+    }
+
+    public KrbMessage getResponse(AuthToken authToken, String passPhrase) {
+        KrbMessage krbMessage = null;
+        try {
+            krbMessage = handleMessage(authToken, passPhrase);
+        } catch (KrbException e) {
+            LOG.error("Failed to handle message. " + e.getMessage());
+        }
+        return krbMessage;
+    }
+
+    /**
+     * Process the client request message.
+     */
+    public KrbMessage handleMessage(AuthToken authToken, String passPhrase) throws KrbException {
+
+        // set the audiences
+        List<String> auds = new ArrayList<String>();
+        String audience = getAudience("krbtgt");
+        auds.add(audience);
+        authToken.setAudiences(auds);
+
+        AsReq asReq = createAsReq(authToken);
+        KdcRequest kdcRequest = new AsRequest(asReq, kdcContext);
+        kdcRequest.setHttps(true);
+        List<EncryptionType> requestedTypes = getEncryptionTypes();
+        EncryptionType bestType = EncryptionUtil.getBestEncryptionType(requestedTypes,
+                kdcContext.getConfig().getEncryptionTypes());
+
+        if (bestType == null) {
+            LOG.error("Can't get the best encryption type.");
+            throw new KrbException(KrbErrorCode.KDC_ERR_ETYPE_NOSUPP);
+        }
+
+        PrincipalName clientPrincipal = new PrincipalName(authToken.getSubject());
+        String clientRealm = asReq.getReqBody().getRealm();
+        if (clientRealm == null || clientRealm.isEmpty()) {
+            clientRealm = getKdcContext().getKdcRealm();
+        }
+        clientPrincipal.setRealm(clientRealm);
+
+        // Set the client key
+        EncryptionKey clientKey = HasUtil.getClientKey(clientPrincipal.getName(),
+            passPhrase, bestType);
+        kdcRequest.setClientKey(clientKey);
+
+        // Set the token issuers
+        getKdcServer().getKdcConfig().setString(KdcConfigKey.TOKEN_ISSUERS, "has");
+
+        KrbMessage krbResponse;
+
+        try {
+            kdcRequest.process();
+            krbResponse = kdcRequest.getReply();
+        } catch (KrbException e) {
+            LOG.error("Error occurred when request tgt. " + e.getMessage());
+            if (e instanceof KdcRecoverableException) {
+                krbResponse = handleRecoverableException(
+                        (KdcRecoverableException) e, kdcRequest);
+            } else {
+                KrbError krbError = new KrbError();
+                krbError.setStime(KerberosTime.now());
+                krbError.setSusec(100);
+                if (e.getKrbErrorCode() != null) {
+                    krbError.setErrorCode(e.getKrbErrorCode());
+                } else {
+                    krbError.setErrorCode(KrbErrorCode.UNKNOWN_ERR);
+                }
+                krbError.setCrealm(kdcContext.getKdcRealm());
+                if (kdcRequest.getClientPrincipal() != null) {
+                    krbError.setCname(kdcRequest.getClientPrincipal());
+                }
+                krbError.setRealm(kdcContext.getKdcRealm());
+                if (kdcRequest.getServerPrincipal() != null) {
+                    krbError.setSname(kdcRequest.getServerPrincipal());
+                } else {
+                    PrincipalName serverPrincipal = kdcRequest.getKdcReq().getReqBody().getSname();
+                    serverPrincipal.setRealm(kdcRequest.getKdcReq().getReqBody().getRealm());
+                    krbError.setSname(serverPrincipal);
+                }
+                if (KrbErrorCode.KRB_AP_ERR_BAD_INTEGRITY.equals(e.getKrbErrorCode())) {
+                    krbError.setEtext("PREAUTH_FAILED");
+                } else {
+                    krbError.setEtext(e.getMessage());
+                }
+                krbResponse = krbError;
+            }
+        }
+        return krbResponse;
+    }
+
+    /**
+     * Process the recoverable exception.
+     *
+     * @param e The exception returned by the KDC
+     * @param kdcRequest kdc request
+     * @return The KrbError
+     */
+    private KrbMessage handleRecoverableException(KdcRecoverableException e,
+                                                  KdcRequest kdcRequest)
+            throws KrbException {
+        LOG.info("KRB error occurred while processing request:"
+                + e.getMessage());
+
+        KrbError error = e.getKrbError();
+        error.setStime(KerberosTime.now());
+        error.setSusec(100);
+        error.setErrorCode(e.getKrbError().getErrorCode());
+        error.setRealm(kdcContext.getKdcRealm());
+        if (kdcRequest != null) {
+            error.setSname(kdcRequest.getKdcReq().getReqBody().getCname());
+        } else {
+            error.setSname(new PrincipalName("NONE"));
+        }
+        error.setEtext(e.getMessage());
+        return error;
+    }
+
+    public AsReq createAsReq(AuthToken authToken) throws KrbException {
+        AsReq asReq = new AsReq();
+        KdcReqBody body = makeReqBody();
+        asReq.setReqBody(body);
+
+        PaTokenRequest tokenPa = new PaTokenRequest();
+        KrbToken krbToken = new KrbToken(authToken, TokenFormat.JWT);
+        tokenPa.setToken(krbToken);
+        TokenInfo info = new TokenInfo();
+        info.setTokenVendor(authToken.getIssuer());
+        tokenPa.setTokenInfo(info);
+
+        PaDataEntry paDataEntry = new PaDataEntry();
+        paDataEntry.setPaDataType(PaDataType.TOKEN_REQUEST);
+        paDataEntry.setPaDataValue(KrbCodec.encode(tokenPa));
+
+        PaData paData = new PaData();
+        paData.addElement(paDataEntry);
+        asReq.setPaData(paData);
+        return asReq;
+    }
+
+    /**
+     * Create the KdcReqBody
+     *
+     * @return KdcReqBody
+     *
+     * @throws KrbException e
+     */
+    protected KdcReqBody makeReqBody() throws KrbException {
+        KdcReqBody body = new KdcReqBody();
+
+        long startTime = System.currentTimeMillis();
+        body.setFrom(new KerberosTime(startTime));
+
+        // Set the client principal as null
+        PrincipalName cName = null;
+        body.setCname(cName);
+
+        body.setRealm(getKrbContext().getKrbSetting().getKdcRealm());
+
+        PrincipalName sName = getServerPrincipal();
+        body.setSname(sName);
+
+        body.setTill(new KerberosTime(startTime + krbContext.getTicketValidTime()));
+
+        int nonce = krbContext.generateNonce();
+        body.setNonce(nonce);
+//        setChosenNonce(nonce);
+
+        body.setKdcOptions(getKdcOptions());
+
+        HostAddresses addresses = getHostAddresses();
+        if (addresses != null) {
+            body.setAddresses(addresses);
+        }
+
+        body.setEtypes(getEncryptionTypes());
+
+        return body;
+    }
+
+    private PrincipalName getServerPrincipal() {
+        return KrbUtil.makeTgsPrincipal(getKrbContext().getKrbSetting().getKdcRealm());
+    }
+
+    private KdcOptions getKdcOptions() {
+        KdcOptions kdcOptions = new KdcOptions();
+        // By default enforce these flags
+        kdcOptions.setFlag(KdcOption.FORWARDABLE);
+        kdcOptions.setFlag(KdcOption.PROXIABLE);
+        kdcOptions.setFlag(KdcOption.RENEWABLE_OK);
+        return kdcOptions;
+    }
+
+    public HostAddresses getHostAddresses() {
+        List<HostAddress> hostAddresses = new ArrayList<HostAddress>();
+        HostAddresses addresses = null;
+        // The list is left empty here, so addresses stays null.
+        if (!hostAddresses.isEmpty()) {
+            addresses = new HostAddresses();
+            for (HostAddress ha : hostAddresses) {
+                addresses.addElement(ha);
+            }
+        }
+        return addresses;
+    }
+
+    public List<EncryptionType> getEncryptionTypes() {
+        List<EncryptionType> encryptionTypes = krbContext.getConfig().getEncryptionTypes();
+        return EncryptionUtil.orderEtypesByStrength(encryptionTypes);
+    }
+}
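
The handler above is what turns a plugin-verified AuthToken into an AS exchange against the local KDC. A hedged sketch of how a caller might use it; the helper method, token and pass phrase are placeholders, and hasServer must already have a started KDC server behind it.

    import org.apache.kerby.has.server.HasServer;
    import org.apache.kerby.has.server.kdc.HasKdcHandler;
    import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
    import org.apache.kerby.kerberos.kerb.type.base.KrbError;
    import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;

    public class KdcHandlerSketch {

        // Hypothetical helper: authToken would come from HasServerPlugin.authenticate(),
        // and the pass phrase is the shared secret used to derive the client key.
        static KrbMessage requestTgt(HasServer hasServer, AuthToken authToken, String passPhrase) {
            HasKdcHandler handler = new HasKdcHandler(hasServer);
            KrbMessage reply = handler.getResponse(authToken, passPhrase);
            if (reply instanceof KrbError) {
                System.err.println("AS exchange returned a KRB-ERROR instead of an AS-REP.");
            }
            return reply;
        }
    }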

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/kdc/MySQLConfKey.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/kdc/MySQLConfKey.java b/has/has-server/src/main/java/org/apache/kerby/has/server/kdc/MySQLConfKey.java
new file mode 100644
index 0000000..0bf294a
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/kdc/MySQLConfKey.java
@@ -0,0 +1,52 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.server.kdc;
+
+import org.apache.kerby.config.ConfigKey;
+
+/**
+ * Define all the MySQL backend related configuration items with default values.
+ */
+public enum MySQLConfKey implements ConfigKey {
+    MYSQL_DRIVER("com.mysql.jdbc.Driver"),
+    MYSQL_URL("jdbc:mysql://127.0.0.1:3306/mysqlbackend"),
+    MYSQL_USER("root"),
+    MYSQL_PASSWORD("passwd");
+
+    private Object defaultValue;
+
+    MySQLConfKey() {
+        this.defaultValue = null;
+    }
+
+    MySQLConfKey(Object defaultValue) {
+        this.defaultValue = defaultValue;
+    }
+
+    @Override
+    public String getPropertyKey() {
+        return name().toLowerCase();
+    }
+
+    @Override
+    public Object getDefaultValue() {
+        return this.defaultValue;
+    }
+}
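
The enum names map onto the lower-case property keys that the rest of the server reads (mysql_driver, mysql_url, mysql_user, mysql_password), which is why HasServer and the backend config use the same strings. A tiny sketch of that mapping:

    import org.apache.kerby.has.server.kdc.MySQLConfKey;

    public class MySQLConfKeySketch {
        public static void main(String[] args) {
            for (MySQLConfKey key : MySQLConfKey.values()) {
                // Prints, for example, "mysql_url -> jdbc:mysql://127.0.0.1:3306/mysqlbackend".
                System.out.println(key.getPropertyKey() + " -> " + key.getDefaultValue());
            }
        }
    }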

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/kdc/MySQLIdentityBackend.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/kdc/MySQLIdentityBackend.java b/has/has-server/src/main/java/org/apache/kerby/has/server/kdc/MySQLIdentityBackend.java
new file mode 100644
index 0000000..c3784c8
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/kdc/MySQLIdentityBackend.java
@@ -0,0 +1,426 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.server.kdc;
+
+import org.apache.commons.dbutils.DbUtils;
+import org.apache.directory.api.util.GeneralizedTime;
+import org.apache.kerby.config.Config;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.identity.backend.AbstractIdentityBackend;
+import org.apache.kerby.kerberos.kerb.request.KrbIdentity;
+import org.apache.kerby.kerberos.kerb.type.KerberosTime;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.ResultSet;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import javax.sql.rowset.serial.SerialBlob;
+import java.util.Map;
+import java.util.List;
+import java.util.ArrayList;
+import java.text.ParseException;
+
+/**
+ * A MySQL based backend implementation.
+ */
+public class MySQLIdentityBackend extends AbstractIdentityBackend {
+    private Connection connection;
+    private String driver;
+    private String url;
+    private String user;
+    private String password;
+    private static final Logger LOG = LoggerFactory.getLogger(MySQLIdentityBackend.class);
+    private String identityTable;
+    private String keyInfoTable;
+
+    /**
+     * Construct an instance using the specified config, which contains everything
+     * needed to initialize a MySQL backend.
+     * @param config the config used to configure the backend
+     */
+    public MySQLIdentityBackend(final Config config) {
+        setConfig(config);
+    }
+
+    public MySQLIdentityBackend() { }
+
+    /**
+     * Start the MySQL connection.
+     */
+    private void startConnection() throws KrbException {
+        try {
+            Class.forName(driver);
+            connection = DriverManager.getConnection(url, user, password);
+            if (!connection.isClosed()) {
+                LOG.info("Succeeded in connecting to MySQL.");
+            }
+        } catch (ClassNotFoundException e) {
+            throw new KrbException("JDBC Driver Class not found. ", e);
+        } catch (SQLException e) {
+            throw new KrbException("Failed to connecting to MySQL. ", e);
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected void doInitialize() throws KrbException {
+        LOG.info("Initializing the MySQL identity backend.");
+        driver = getConfig().getString(MySQLConfKey.MYSQL_DRIVER, true);
+        user = getConfig().getString(MySQLConfKey.MYSQL_USER, true);
+        password = getConfig().getString(MySQLConfKey.MYSQL_PASSWORD, true);
+
+        String urlString = getConfig().getString(MySQLConfKey.MYSQL_URL, true);
+        if (urlString == null || urlString.isEmpty()) {
+            urlString = getBackendConfig().getString(MySQLConfKey.MYSQL_URL, true);
+        }
+        url = urlString;
+
+        PreparedStatement preInitialize = null;
+        PreparedStatement preKdcRealm = null;
+        ResultSet resKdcRealm = null;
+        PreparedStatement preIdentity = null;
+        PreparedStatement preKey = null;
+        try {
+            startConnection();
+
+            // Set initialized for kdc config
+            String stmInitialize = "UPDATE `kdc_config` SET initialized = true WHERE id = 1";
+            preInitialize = connection.prepareStatement(stmInitialize);
+            preInitialize.executeUpdate();
+
+            // Get identity table name according to realm of kdc
+            String stmKdcRealm = "SELECT realm FROM `kdc_config`";
+            preKdcRealm = connection.prepareStatement(stmKdcRealm);
+            resKdcRealm = preKdcRealm.executeQuery();
+            if (resKdcRealm.next()) {
+                String realm = resKdcRealm.getString("realm").toLowerCase();
+                identityTable = "`" + realm + "_identity" + "`";
+                keyInfoTable = "`" + realm + "_key" + "`";
+            } else {
+                throw new KrbException("Failed to get kdc config.");
+            }
+
+            // Create identity table
+            String stmIdentity = "CREATE TABLE IF NOT EXISTS " + identityTable
+                + " (principal varchar(255) NOT NULL, key_version INTEGER "
+                + "DEFAULT 1, kdc_flags INTEGER DEFAULT 0, disabled bool "
+                + "DEFAULT NULL, locked bool DEFAULT NULL, expire_time "
+                + "VARCHAR(255) DEFAULT NULL, created_time VARCHAR(255) "
+                + "DEFAULT NULL, PRIMARY KEY (principal) ) ENGINE=INNODB;";
+            preIdentity = connection.prepareStatement(stmIdentity);
+            preIdentity.executeUpdate();
+
+            // Create key table
+            String stmKey = "CREATE TABLE IF NOT EXISTS " + keyInfoTable
+                + " (key_id INTEGER NOT NULL AUTO_INCREMENT, key_type "
+                + "VARCHAR(255) DEFAULT NULL, kvno INTEGER DEFAULT -1, "
+                + "key_value BLOB DEFAULT NULL, principal VARCHAR(255) NOT NULL,"
+                + "PRIMARY KEY (key_id), INDEX (principal), FOREIGN KEY "
+                + "(principal) REFERENCES " + identityTable + "(principal) "
+                + ") ENGINE=INNODB;";
+            preKey = connection.prepareStatement(stmKey);
+            preKey.executeUpdate();
+
+        } catch (SQLException e) {
+            LOG.error("Error occurred while initialize MySQL backend." + e.toString());
+            throw new KrbException("Failed to create table in database. ", e);
+        } finally {
+            DbUtils.closeQuietly(preInitialize);
+            DbUtils.closeQuietly(preKdcRealm);
+            DbUtils.closeQuietly(resKdcRealm);
+            DbUtils.closeQuietly(preIdentity);
+            DbUtils.closeQuietly(preKey);
+            DbUtils.closeQuietly(connection);
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected void doStop() throws KrbException {
+        try {
+            closeConnection();
+            if (connection.isClosed()) {
+                LOG.info("Succeeded in closing connection with MySQL.");
+            }
+        } catch (SQLException e) {
+            LOG.error("Failed to close connection with MySQL.");
+            throw new KrbException("Failed to close connection with MySQL. ", e);
+        }
+    }
+
+    /**
+     * Close the connection for stop().
+     * @throws SQLException if an error occurs while closing the connection
+     */
+    private void closeConnection() throws SQLException {
+        if (!connection.isClosed()) {
+            connection.close();
+        }
+    }
+
+    /**
+     * Convert a KerberosTime type object to a generalized time form of String.
+     * @param kerberosTime The kerberos time to convert
+     * @return the generalized time as a String
+     */
+    private String toGeneralizedTime(final KerberosTime kerberosTime) {
+        GeneralizedTime generalizedTime = new GeneralizedTime(kerberosTime.getValue());
+        return generalizedTime.toString();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected KrbIdentity doAddIdentity(KrbIdentity identity) throws KrbException {
+        String principalName = identity.getPrincipalName();
+        int keyVersion = identity.getKeyVersion();
+        int kdcFlags = identity.getKdcFlags();
+        boolean disabled = identity.isDisabled();
+        boolean locked = identity.isLocked();
+        String createdTime = toGeneralizedTime(identity.getCreatedTime());
+        String expireTime = toGeneralizedTime(identity.getExpireTime());
+        Map<EncryptionType, EncryptionKey> keys = identity.getKeys();
+
+        PreparedStatement preIdentity = null;
+        PreparedStatement preKey = null;
+
+        KrbIdentity duplicateIdentity = doGetIdentity(principalName);
+        if (duplicateIdentity != null) {
+            LOG.warn("The identity maybe duplicate.");
+
+            return duplicateIdentity;
+        } else {
+            try {
+                startConnection();
+                connection.setAutoCommit(false);
+
+                // Insert identity to identity table
+                String stmIdentity = "insert into " + identityTable + " values(?, ?, ?, ?, ?, ?, ?)";
+                preIdentity = connection.prepareStatement(stmIdentity);
+                preIdentity.setString(1, principalName);
+                preIdentity.setInt(2, keyVersion);
+                preIdentity.setInt(3, kdcFlags);
+                preIdentity.setBoolean(4, disabled);
+                preIdentity.setBoolean(5, locked);
+                preIdentity.setString(6, createdTime);
+                preIdentity.setString(7, expireTime);
+                preIdentity.executeUpdate();
+
+                // Insert keys to key table
+                for (Map.Entry<EncryptionType, EncryptionKey> entry : keys.entrySet()) {
+                    String stmKey = "insert into " + keyInfoTable + " (key_type, kvno, key_value, principal)"
+                        + " values(?, ?, ?, ?)";
+                    preKey = connection.prepareStatement(stmKey);
+                    preKey.setString(1, entry.getKey().getName());
+                    preKey.setInt(2, entry.getValue().getKvno());
+                    preKey.setBlob(3, new SerialBlob(entry.getValue().getKeyData()));
+                    preKey.setString(4, principalName);
+                    preKey.executeUpdate();
+                }
+
+                connection.commit();
+                return identity;
+            } catch (SQLException e) {
+                try {
+                    LOG.info("Transaction is being rolled back.");
+                    connection.rollback();
+                } catch (SQLException ex) {
+                    throw new KrbException("Transaction roll back failed. ", ex);
+                }
+                LOG.error("Error occurred while adding identity.");
+                throw new KrbException("Failed to add identity. ", e);
+            } finally {
+                DbUtils.closeQuietly(preIdentity);
+                DbUtils.closeQuietly(preKey);
+                doStop();
+            }
+        }
+    }
+
+    /**
+     * Create a KerberosTime from a generalized time string.
+     * @param generalizedTime the generalized time string
+     * @return the corresponding KerberosTime
+     * @throws ParseException if the generalized time string cannot be parsed
+     */
+    private KerberosTime createKerberosTime(final String generalizedTime) throws ParseException {
+        long time = new GeneralizedTime(generalizedTime).getTime();
+        return new KerberosTime(time);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected KrbIdentity doGetIdentity(final String principalName) throws KrbException {
+        KrbIdentity krbIdentity = new KrbIdentity(principalName);
+
+        PreparedStatement preIdentity = null;
+        ResultSet resIdentity = null;
+        PreparedStatement preKey = null;
+        ResultSet resKey = null;
+        try {
+            startConnection();
+
+            // Get identity from identity table
+            String stmIdentity = "SELECT * FROM " + identityTable + " where principal = ?";
+            preIdentity = connection.prepareStatement(stmIdentity);
+            preIdentity.setString(1, principalName);
+            resIdentity = preIdentity.executeQuery();
+
+            if (!resIdentity.next()) {
+                return null;
+            }
+
+            // The cursor already points at the single matching row after the next() check above,
+            // so read the identity fields directly instead of advancing the cursor again.
+            krbIdentity.setKeyVersion(resIdentity.getInt("key_version"));
+            krbIdentity.setKdcFlags(resIdentity.getInt("kdc_flags"));
+            krbIdentity.setDisabled(resIdentity.getBoolean("disabled"));
+            krbIdentity.setLocked(resIdentity.getBoolean("locked"));
+            krbIdentity.setCreatedTime(createKerberosTime(resIdentity.getString("created_time")));
+            krbIdentity.setExpireTime(createKerberosTime(resIdentity.getString("expire_time")));
+
+            // Get keys from key table
+            List<EncryptionKey> keys = new ArrayList<>();
+            String stmKey = "SELECT * FROM " + keyInfoTable + " where principal = ?";
+            preKey = connection.prepareStatement(stmKey);
+            preKey.setString(1, principalName);
+            resKey = preKey.executeQuery();
+            while (resKey.next()) {
+                int kvno = resKey.getInt("kvno");
+                String keyType = resKey.getString("key_type");
+                EncryptionType eType = EncryptionType.fromName(keyType);
+                byte[] keyValue = resKey.getBytes("key_value");
+                EncryptionKey key = new EncryptionKey(eType, keyValue, kvno);
+                keys.add(key);
+            }
+
+            krbIdentity.addKeys(keys);
+            return krbIdentity;
+        } catch (SQLException e) {
+            LOG.error("Error occurred while getting identity.");
+            throw new KrbException("Failed to get identity. ", e);
+        } catch (ParseException e) {
+            throw new KrbException("Failed to get identity. ", e);
+        } finally {
+            DbUtils.closeQuietly(preIdentity);
+            DbUtils.closeQuietly(resIdentity);
+            DbUtils.closeQuietly(preKey);
+            DbUtils.closeQuietly(resKey);
+            doStop();
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected KrbIdentity doUpdateIdentity(KrbIdentity identity) throws KrbException {
+        String principalName = identity.getPrincipalName();
+        try {
+            doDeleteIdentity(principalName); // Delete former identity
+            doAddIdentity(identity); // Insert new identity
+        } catch (KrbException e) {
+            LOG.error("Error occurred while updating identity: " + principalName);
+            throw new KrbException("Failed to update identity. ", e);
+        }
+
+        return getIdentity(principalName);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected void doDeleteIdentity(String principalName) throws KrbException {
+        PreparedStatement preKey = null;
+        PreparedStatement preIdentity = null;
+        try {
+            startConnection();
+            connection.setAutoCommit(false);
+
+            // Delete keys from key table
+            String stmKey = "DELETE FROM  " + keyInfoTable + " where principal = ?";
+            preKey = connection.prepareStatement(stmKey);
+            preKey.setString(1, principalName);
+            preKey.executeUpdate();
+
+            // Delete identity from identity table
+            String stmIdentity = "DELETE FROM " + identityTable + " where principal = ? ";
+            preIdentity = connection.prepareStatement(stmIdentity);
+            preIdentity.setString(1, principalName);
+            preIdentity.executeUpdate();
+
+            connection.commit();
+        } catch (SQLException e) {
+            try {
+                LOG.info("Transaction is being rolled back.");
+                connection.rollback();
+            } catch (SQLException ex) {
+                throw new KrbException("Transaction roll back failed. ", ex);
+            }
+            LOG.error("Error occurred while deleting identity.");
+            throw new KrbException("Failed to delete identity. ", e);
+        } finally {
+            DbUtils.closeQuietly(preIdentity);
+            DbUtils.closeQuietly(preKey);
+            doStop();
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected Iterable<String> doGetIdentities() throws KrbException {
+        List<String> identityNames = new ArrayList<>();
+        PreparedStatement preSmt = null;
+        ResultSet result = null;
+        try {
+            startConnection();
+            String statement = "SELECT * FROM " + identityTable;
+            preSmt = connection.prepareStatement(statement);
+            result = preSmt.executeQuery();
+            while (result.next()) {
+                identityNames.add(result.getString("principal"));
+            }
+            result.close();
+            preSmt.close();
+        } catch (SQLException e) {
+            LOG.error("Error occurred while getting identities.");
+            throw new KrbException("Failed to get identities. ", e);
+        } finally {
+            DbUtils.closeQuietly(preSmt);
+            DbUtils.closeQuietly(result);
+            doStop();
+        }
+
+        return identityNames;
+    }
+}
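
For reference, here is a minimal standalone sketch of querying the two per-realm tables that the backend creates above; it is not part of this patch. The JDBC URL, credentials, realm, and principal name are placeholders, and it assumes the MySQL JDBC driver is on the classpath.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;

    public class MySqlBackendQuerySketch {
        public static void main(String[] args) throws Exception {
            // Placeholder connection settings; use the values from your MySQL backend config.
            String url = "jdbc:mysql://localhost:3306/kerby";
            String realm = "example.com"; // the backend lower-cases the realm when naming tables
            try (Connection conn = DriverManager.getConnection(url, "root", "secret")) {
                // Identities live in <realm>_identity and keys in <realm>_key,
                // joined on the principal column (see the CREATE TABLE statements above).
                String sql = "SELECT i.principal, i.key_version, k.key_type, k.kvno "
                    + "FROM `" + realm + "_identity` i "
                    + "JOIN `" + realm + "_key` k ON k.principal = i.principal "
                    + "WHERE i.principal = ?";
                try (PreparedStatement ps = conn.prepareStatement(sql)) {
                    ps.setString(1, "alice@EXAMPLE.COM");
                    try (ResultSet rs = ps.executeQuery()) {
                        while (rs.next()) {
                            System.out.println(rs.getString("principal")
                                + " kvno=" + rs.getInt("kvno")
                                + " type=" + rs.getString("key_type"));
                        }
                    }
                }
            }
        }
    }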

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/ConfFilter.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/ConfFilter.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/ConfFilter.java
new file mode 100644
index 0000000..7bfd035
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/ConfFilter.java
@@ -0,0 +1,54 @@
+package org.apache.kerby.has.server.web;
+
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.util.HasUtil;
+import org.apache.kerby.has.server.HasServer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import java.io.File;
+import java.io.IOException;
+@Private
+@Unstable
+public class ConfFilter implements Filter {
+    public static final Logger LOG = LoggerFactory.getLogger(ConfFilter.class);
+    @Override
+    public void init(FilterConfig filterConfig) throws ServletException {
+
+    }
+
+    @Override
+    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse,
+                         FilterChain filterChain) throws IOException, ServletException {
+
+        final HasServer hasServer = WebServer.getHasServerFromContext(
+                servletRequest.getServletContext());
+        HasConfig hasConfig;
+        try {
+            hasConfig = HasUtil.getHasConfig(
+                    new File(hasServer.getConfDir(), "has-server.conf"));
+            String isEnableConf = hasConfig.getEnableConf();
+            if (!"true".equals(isEnableConf)) {
+                throw new RuntimeException("The KDC has already started, so the configuration interface is disabled.");
+            }
+            filterChain.doFilter(servletRequest, servletResponse);
+        } catch (HasException e) {
+            LOG.error("Failed to load has-server.conf. " + e.getMessage());
+            throw new ServletException(e);
+        }
+    }
+
+    @Override
+    public void destroy() {
+
+    }
+}
\ No newline at end of file
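
The patch does not show where ConfFilter gets registered. Purely as an illustration (and not necessarily how the HAS WebServer wires it up), a filter like this can be attached to the configuration endpoints with the standard Servlet 3.0 registration API; the listener class name and the URL pattern below are assumptions.

    import java.util.EnumSet;
    import javax.servlet.DispatcherType;
    import javax.servlet.FilterRegistration;
    import javax.servlet.ServletContext;
    import javax.servlet.ServletContextEvent;
    import javax.servlet.ServletContextListener;

    public class ConfFilterRegistrationSketch implements ServletContextListener {
        @Override
        public void contextInitialized(ServletContextEvent sce) {
            ServletContext ctx = sce.getServletContext();
            // Guard the configuration endpoints so they only work while the
            // enable-conf flag in has-server.conf is still true.
            FilterRegistration.Dynamic reg = ctx.addFilter("confFilter", ConfFilter.class);
            reg.addMappingForUrlPatterns(EnumSet.of(DispatcherType.REQUEST), false, "/has/v1/conf/*");
        }

        @Override
        public void contextDestroyed(ServletContextEvent sce) {
            // nothing to clean up
        }
    }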

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/HostRoleType.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/HostRoleType.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/HostRoleType.java
new file mode 100644
index 0000000..104a41f
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/HostRoleType.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server.web;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public enum HostRoleType {
+    HDFS("HDFS", new String[]{"HTTP", "hdfs"}),
+    YARN("YARN", new String[]{"yarn"}),
+    MAPRED("MAPRED", new String[]{"mapred"}),
+    HBASE("HBASE", new String[]{"hbase"}),
+    ZOOKEEPER("ZOOKEEPER", new String[]{"zookeeper"}),
+    SPARK("SPARK", new String[]{"spark"}),
+    HIVE("HIVE", new String[]{"hive"}),
+    OOZIE("OOZIE", new String[]{"oozie"}),
+    HUE("HUE", new String[]{"hue"});
+
+    private String name;
+    private String[] princs;
+
+    HostRoleType(String name, String[] princs) {
+        this.name = name;
+        this.princs = princs;
+    }
+
+    public String[] getPrincs() {
+        return princs;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+}
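
As a quick illustration of how the principal name stems in this enum expand into concrete per-host principals (the host and realm below are placeholders; this sketch is not part of the patch):

    public class HostRolePrincipalSketch {
        public static void main(String[] args) {
            String host = "host1.example.com"; // placeholder host name
            String realm = "EXAMPLE.COM";      // placeholder realm
            for (HostRoleType role : HostRoleType.values()) {
                for (String princ : role.getPrincs()) {
                    // e.g. HDFS expands to HTTP/host1.example.com@EXAMPLE.COM and hdfs/host1.example.com@EXAMPLE.COM
                    System.out.println(role.getName() + ": " + princ + "/" + host + "@" + realm);
                }
            }
        }
    }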


[14/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPluginRegistry.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPluginRegistry.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPluginRegistry.java
deleted file mode 100644
index 0254ed6..0000000
--- a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPluginRegistry.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.client;
-
-import org.apache.hadoop.has.common.HasException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Collections;
-import java.util.Map;
-import java.util.ServiceLoader;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-
-public class HasClientPluginRegistry {
-    static final Logger LOG = LoggerFactory.getLogger(HasClientPluginRegistry.class);
-
-    private static Map<String, Class> allPlugins = new ConcurrentHashMap<>();
-
-    static {
-        ServiceLoader<HasClientPlugin> plugins = ServiceLoader.load(HasClientPlugin.class);
-
-        for (HasClientPlugin plugin : plugins) {
-            allPlugins.put(plugin.getLoginType(), plugin.getClass());
-        }
-    }
-
-    public static Set<String> registeredPlugins() {
-        return Collections.unmodifiableSet(allPlugins.keySet());
-    }
-
-    public static boolean registeredPlugin(String name) {
-        return allPlugins.containsKey(name);
-    }
-
-    public static HasClientPlugin createPlugin(String name) throws HasException {
-        if (!registeredPlugin(name)) {
-            throw new HasException("Unregistered plugin " + name);
-        }
-        try {
-            HasClientPlugin clientPlugin = (HasClientPlugin) allPlugins.get(name).newInstance();
-            return clientPlugin;
-        } catch (Exception e) {
-            LOG.error("Create {} plugin failed", name, e);
-            throw new HasException(e.getMessage());
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginException.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginException.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginException.java
deleted file mode 100644
index c07eb59..0000000
--- a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginException.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.client;
-
-import org.apache.hadoop.has.common.HasException;
-
-public class HasLoginException extends HasException {
-    private static final long serialVersionUID = 4140429098192628252L;
-
-    public HasLoginException(Throwable cause) {
-        super(cause);
-    }
-
-    public HasLoginException(String message) {
-        super(message);
-    }
-
-    public HasLoginException(String message, Throwable cause) {
-        super(message, cause);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginModule.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginModule.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginModule.java
deleted file mode 100644
index 6c71236..0000000
--- a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginModule.java
+++ /dev/null
@@ -1,491 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.has.client;
-
-import com.sun.security.auth.module.Krb5LoginModule;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.kerby.kerberos.kerb.ccache.Credential;
-import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import sun.security.jgss.krb5.Krb5Util;
-import sun.security.krb5.Credentials;
-import sun.security.krb5.KrbException;
-import sun.security.krb5.PrincipalName;
-
-import javax.security.auth.DestroyFailedException;
-import javax.security.auth.Subject;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.kerberos.KerberosPrincipal;
-import javax.security.auth.kerberos.KerberosTicket;
-import javax.security.auth.login.LoginException;
-import javax.security.auth.spi.LoginModule;
-import java.io.IOException;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * Login with tgt ticket
- * The client's TGT will be retrieved from the API of HasClient
- */
-//CHECKSTYLE.OFF
-public class HasLoginModule implements LoginModule {
-
-    public static final Logger LOG = LoggerFactory.getLogger(HasLoginModule.class);
-
-    Krb5LoginModule krb5LoginModule;
-
-    // initial state
-    private Subject subject;
-    private CallbackHandler callbackHandler;
-    private Map<String, Object> sharedState;
-    private Map<String, ?> options;
-
-    // configurable option
-    private boolean debug = false;
-    private boolean doNotPrompt = false;
-    private boolean useTgtTicket = false;
-    private String hadoopSecurityHas = null;
-    private String princName = null;
-
-    private boolean refreshKrb5Config = false;
-
-    // specify if initiator.
-    // perform authentication exchange if initiator
-    private boolean isInitiator = true;
-
-    // the authentication status
-    private boolean succeeded = false;
-    private boolean commitSucceeded = false;
-
-    private Credentials cred = null;
-
-    private PrincipalName principal = null;
-    private KerberosPrincipal kerbClientPrinc = null;
-    private KerberosTicket kerbTicket = null;
-    private StringBuffer krb5PrincName = null;
-    private boolean unboundServer = false;
-
-    /**
-     * Initialize this <code>LoginModule</code>.
-     * <p>
-     * <p>
-     *
-     * @param subject         the <code>Subject</code> to be authenticated. <p>
-     * @param callbackHandler a <code>CallbackHandler</code> for
-     *                        communication with the end user (prompting for
-     *                        usernames and passwords, for example). <p>
-     * @param sharedState     shared <code>LoginModule</code> state. <p>
-     * @param options         options specified in the login
-     *                        <code>Configuration</code> for this particular
-     *                        <code>LoginModule</code>.
-     */
-    public void initialize(Subject subject,
-                           CallbackHandler callbackHandler,
-                           Map<String, ?> sharedState,
-                           Map<String, ?> options) {
-
-        this.subject = subject;
-        this.callbackHandler = callbackHandler;
-        this.sharedState = (Map<String, Object>) sharedState;
-        this.options = options;
-
-        // initialize any configured options
-        useTgtTicket = "true".equalsIgnoreCase((String) options.get("useTgtTicket"));
-
-        if (useTgtTicket) {
-            debug = "true".equalsIgnoreCase((String) options.get("debug"));
-            doNotPrompt = "true".equalsIgnoreCase((String) options.get("doNotPrompt"));
-            useTgtTicket = "true".equalsIgnoreCase((String) options.get("useTgtTicket"));
-            hadoopSecurityHas = (String) options.get("hadoopSecurityHas");
-            princName = (String) options.get("principal");
-            refreshKrb5Config =
-                "true".equalsIgnoreCase((String) options.get("refreshKrb5Config"));
-
-            // check isInitiator value
-            String isInitiatorValue = ((String) options.get("isInitiator"));
-            if (isInitiatorValue != null) {
-                // use default, if value not set
-                isInitiator = "true".equalsIgnoreCase(isInitiatorValue);
-            }
-
-            if (debug) {
-                System.out.print("Debug is  " + debug
-                    + " doNotPrompt " + doNotPrompt
-                    + " isInitiator " + isInitiator
-                    + " refreshKrb5Config is " + refreshKrb5Config
-                    + " principal is " + princName + "\n");
-            }
-        } else {
-            krb5LoginModule = new Krb5LoginModule();
-            krb5LoginModule.initialize(subject, callbackHandler, sharedState, options);
-        }
-    }
-
-    /**
-     * Authenticate the user
-     * <p>
-     * <p>
-     *
-     * @return true in all cases since this <code>LoginModule</code>
-     * should not be ignored.
-     * @throws LoginException       if this <code>LoginModule</code>
-     *                              is unable to perform the authentication.
-     */
-    public boolean login() throws LoginException {
-
-        if (useTgtTicket) {
-            if (refreshKrb5Config) {
-                try {
-                    if (debug) {
-                        System.out.println("Refreshing Kerberos configuration");
-                    }
-                    sun.security.krb5.Config.refresh();
-                } catch (KrbException ke) {
-                    LoginException le = new LoginException(ke.getMessage());
-                    le.initCause(ke);
-                    throw le;
-                }
-            }
-            String principalProperty = System.getProperty("sun.security.krb5.principal");
-            if (principalProperty != null) {
-                krb5PrincName = new StringBuffer(principalProperty);
-            } else {
-                if (princName != null) {
-                    krb5PrincName = new StringBuffer(princName);
-                }
-            }
-
-            validateConfiguration();
-
-            if (krb5PrincName != null && krb5PrincName.toString().equals("*")) {
-                unboundServer = true;
-            }
-
-            // attempt the authentication by getting the username and pwd
-            // by prompting or configuration i.e. not from shared state
-
-            try {
-                attemptAuthentication(false);
-                succeeded = true;
-                cleanState();
-                return true;
-            } catch (LoginException e) {
-                // authentication failed -- clean out state
-                if (debug) {
-                    System.out.println("\t\t[HasLoginModule] "
-                        + "authentication failed \n"
-                        + e.getMessage());
-                }
-                succeeded = false;
-                cleanState();
-                throw e;
-            }
-        } else {
-            succeeded = krb5LoginModule.login();
-            return succeeded;
-        }
-    }
-
-    /**
-     * Process the configuration options
-     * Get the TGT from Has Client
-     */
-
-    private void attemptAuthentication(boolean getPasswdFromSharedState)
-        throws LoginException {
-
-        /*
-         * Check the creds cache to see whether
-         * we have TGT for this client principal
-         */
-        if (krb5PrincName != null) {
-            try {
-                principal = new PrincipalName(krb5PrincName.toString(),
-                        PrincipalName.KRB_NT_PRINCIPAL);
-            } catch (KrbException e) {
-                LoginException le = new LoginException(e.getMessage());
-                le.initCause(e);
-                throw le;
-            }
-        }
-
-        try {
-            if (useTgtTicket) {
-                if (debug) {
-                    System.out.println("use tgt ticket to login, acquire TGT TICKET...");
-                }
-
-                HasClient hasClient = new HasClient(hadoopSecurityHas);
-                TgtTicket tgtTicket = null;
-                try {
-                    tgtTicket = hasClient.requestTgt();
-                } catch (HasException e) {
-                    LoginException le = new LoginException(e.getMessage());
-                    le.initCause(e);
-                    throw le;
-                }
-                Credential credential = new Credential(tgtTicket);
-                boolean[] flags = new boolean[7];
-                int flag = credential.getTicketFlags().getFlags();
-                for (int i = 6; i >= 0; i--) {
-                    flags[i] = (flag & (1 << i)) != 0;
-                }
-                Date startTime = null;
-                if (credential.getStartTime() != null) {
-                    startTime = credential.getStartTime().getValue();
-                }
-                cred = new Credentials(credential.getTicket().encode(),
-                    credential.getClientName().getName(),
-                    credential.getServerName().getName(),
-                    credential.getKey().getKeyData(),
-                    credential.getKey().getKeyType().getValue(),
-                    flags,
-                    credential.getAuthTime().getValue(),
-                    startTime,
-                    credential.getEndTime().getValue(),
-                    credential.getRenewTill().getValue(),
-                    null);
-
-                if (cred != null) {
-                    // get the principal name from the ticket cache
-                    if (principal == null) {
-                        principal = cred.getClient();
-                    }
-                }
-                if (debug) {
-                    System.out.println("Principal is " + principal);
-                    if (cred == null) {
-                        System.out.println("null credentials from TGT Ticket");
-                    }
-                }
-            }
-        } catch (KrbException e) {
-            LoginException le = new LoginException(e.getMessage());
-            le.initCause(e);
-            throw le;
-        } catch (IOException ioe) {
-            LoginException ie = new LoginException(ioe.getMessage());
-            ie.initCause(ioe);
-            throw ie;
-        }
-    }
-
-    private void validateConfiguration() throws LoginException {
-        if (doNotPrompt && !useTgtTicket) {
-            throw new LoginException("Configuration Error"
-                + " - either doNotPrompt should be "
-                + " false or"
-                + " useTgtTicket"
-                + " should be true");
-        }
-
-        if (krb5PrincName != null && krb5PrincName.toString().equals("*")) {
-            if (isInitiator) {
-                throw new LoginException("Configuration Error"
-                        + " - principal cannot be * when isInitiator is true");
-            }
-        }
-    }
-
-    /**
-     * <p> This method is called if the LoginContext's
-     * overall authentication succeeded
-     *
-     * @return true if this LoginModule's own login and commit
-     * attempts succeeded, or false otherwise.
-     * @throws LoginException if the commit fails.
-     */
-
-    public boolean commit() throws LoginException {
-        if (debug) {
-            System.out.println("Login success? " + succeeded);
-        }
-
-        if (useTgtTicket) {
-        /*
-         * Let us add the Krb5 Creds to the Subject's
-         * private credentials. The credentials are of type
-         * KerberosKey or KerberosTicket
-         */
-            if (succeeded == false) {
-                return false;
-            } else {
-
-                if (isInitiator && (cred == null)) {
-                    succeeded = false;
-                    throw new LoginException("Null Client Credential");
-                }
-
-                if (subject.isReadOnly()) {
-                    cleanKerberosCred();
-                    throw new LoginException("Subject is Readonly");
-                }
-
-            /*
-             * Add the Principal (authenticated identity)
-             * to the Subject's principal set and
-             * add the credentials (TGT or Service key) to the
-             * Subject's private credentials
-             */
-
-                Set<Object> privCredSet = subject.getPrivateCredentials();
-                Set<java.security.Principal> princSet = subject.getPrincipals();
-                kerbClientPrinc = new KerberosPrincipal(principal.getName());
-
-                // create Kerberos Ticket
-                if (isInitiator) {
-                    kerbTicket = Krb5Util.credsToTicket(cred);
-                }
-
-                // Let us add the kerbClientPrinc,kerbTicket
-
-                // We won't add "*" as a KerberosPrincipal
-                if (!unboundServer
-                    && !princSet.contains(kerbClientPrinc)) {
-                    princSet.add(kerbClientPrinc);
-                }
-
-                // add the TGT
-                if (kerbTicket != null) {
-                    if (!privCredSet.contains(kerbTicket)) {
-                        privCredSet.add(kerbTicket);
-                    }
-                }
-            }
-            commitSucceeded = true;
-            if (debug) {
-                System.out.println("Commit Succeeded \n");
-            }
-            return true;
-        } else {
-            return krb5LoginModule.commit();
-        }
-    }
-
-    /**
-     * <p> This method is called if the LoginContext's
-     * overall authentication failed.
-     *
-     * @return false if this LoginModule's own login and/or commit attempts
-     * failed, and true otherwise.
-     * @throws LoginException if the abort fails.
-     */
-
-    public boolean abort() throws LoginException {
-        if (useTgtTicket) {
-            if (succeeded == false) {
-                return false;
-            } else if (succeeded == true && commitSucceeded == false) {
-                // login succeeded but overall authentication failed
-                succeeded = false;
-                cleanKerberosCred();
-            } else {
-                // overall authentication succeeded and commit succeeded,
-                // but someone else's commit failed
-                logout();
-            }
-            return true;
-        } else {
-            return krb5LoginModule.abort();
-        }
-    }
-
-    /**
-     * Logout the user.
-     * <p>
-     * <p> This method removes the <code>Krb5Principal</code>
-     * that was added by the <code>commit</code> method.
-     * <p>
-     * <p>
-     *
-     * @return true in all cases since this <code>LoginModule</code>
-     * should not be ignored.
-     * @throws LoginException if the logout fails.
-     */
-    public boolean logout() throws LoginException {
-
-        if (useTgtTicket) {
-            if (debug) {
-                System.out.println("\t\t[Krb5LoginModule]: "
-                    + "Entering logout");
-            }
-
-            if (subject.isReadOnly()) {
-                cleanKerberosCred();
-                throw new LoginException("Subject is Readonly");
-            }
-
-            subject.getPrincipals().remove(kerbClientPrinc);
-            // Let us remove all Kerberos credentials stored in the Subject
-            Iterator<Object> it = subject.getPrivateCredentials().iterator();
-            while (it.hasNext()) {
-                Object o = it.next();
-                if (o instanceof KerberosTicket) {
-                    it.remove();
-                }
-            }
-            // clean the kerberos ticket and keys
-            cleanKerberosCred();
-
-            succeeded = false;
-            commitSucceeded = false;
-            if (debug) {
-                System.out.println("\t\t[HasLoginModule]: "
-                    + "logged out Subject");
-            }
-            return true;
-        } else {
-            return krb5LoginModule.logout();
-        }
-    }
-
-    /**
-     * Clean Kerberos credentials
-     */
-    private void cleanKerberosCred() throws LoginException {
-        // Clean the ticket and server key
-        try {
-            if (kerbTicket != null) {
-                kerbTicket.destroy();
-            }
-        } catch (DestroyFailedException e) {
-            throw new LoginException("Destroy Failed on Kerberos Private Credentials");
-        }
-        kerbTicket = null;
-        kerbClientPrinc = null;
-    }
-
-    /**
-     * Clean out the state
-     */
-    private void cleanState() {
-
-        if (!succeeded) {
-            // remove temp results for the next try
-            principal = null;
-        }
-        if (krb5PrincName != null && krb5PrincName.length() != 0) {
-            krb5PrincName.delete(0, krb5PrincName.length());
-        }
-        krb5PrincName = null;
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/kerby/has/client/AbstractHasClientPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/kerby/has/client/AbstractHasClientPlugin.java b/has/has-client/src/main/java/org/apache/kerby/has/client/AbstractHasClientPlugin.java
new file mode 100644
index 0000000..f60a6d0
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/kerby/has/client/AbstractHasClientPlugin.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.client;
+
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.kerberos.kerb.KrbRuntime;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class AbstractHasClientPlugin implements HasClientPlugin {
+    public static final Logger LOG = LoggerFactory.getLogger(AbstractHasClientPlugin.class);
+
+    protected abstract void doLogin(AuthToken token) throws HasLoginException;
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public AuthToken login(HasConfig conf) throws HasLoginException {
+
+        AuthToken authToken = KrbRuntime.getTokenProvider("JWT").createTokenFactory().createToken();
+
+        doLogin(authToken);
+
+        return authToken;
+    }
+
+}
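
For context, a concrete client plugin only has to supply a login type and fill in the token. The sketch below is illustrative and not from the patch: the login type string, the environment variables, the getLoginType() hook (carried over from the old plugin registry), and the AuthToken setter names are assumptions about Kerby's token API.

    package org.apache.kerby.has.client;

    import org.apache.kerby.kerberos.kerb.type.base.AuthToken;

    public class ExampleHasClientPlugin extends AbstractHasClientPlugin {

        @Override
        public String getLoginType() {
            return "EXAMPLE"; // must match the corresponding server-side plugin's login type
        }

        @Override
        protected void doLogin(AuthToken token) throws HasLoginException {
            String user = System.getenv("EXAMPLE_USER"); // hypothetical credential source
            if (user == null) {
                throw new HasLoginException("EXAMPLE_USER is not set");
            }
            // Assumed AuthToken setters: setSubject and addAttribute.
            token.setSubject(user);
            token.addAttribute("passPhrase", System.getenv("EXAMPLE_SECRET"));
        }
    }

With the ServiceLoader-based registry shown elsewhere in this series, such a plugin would also need a META-INF/services entry for the HasClientPlugin interface.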

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/kerby/has/client/HasAdminClient.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/kerby/has/client/HasAdminClient.java b/has/has-client/src/main/java/org/apache/kerby/has/client/HasAdminClient.java
new file mode 100644
index 0000000..7b6457a
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/kerby/has/client/HasAdminClient.java
@@ -0,0 +1,480 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.client;
+
+import com.sun.jersey.api.client.Client;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.api.client.config.ClientConfig;
+import com.sun.jersey.api.client.config.DefaultClientConfig;
+import com.sun.jersey.client.urlconnection.HTTPSProperties;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.kerby.has.common.HasAdmin;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.ssl.SSLFactory;
+import org.apache.kerby.has.common.util.URLConnectionFactory;
+import org.apache.kerby.kerberos.kerb.common.KrbUtil;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.glassfish.jersey.SslConfigurator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLSession;
+import javax.ws.rs.core.MultivaluedMap;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * An admin client API for applications to interact with the KDC
+ */
+public class HasAdminClient implements HasAdmin {
+
+    public static final Logger LOG = LoggerFactory.getLogger(HasAdminClient.class);
+
+    private HasConfig hasConfig;
+    private File confDir;
+
+    public HasAdminClient(HasConfig hasConfig) {
+        this.hasConfig = hasConfig;
+    }
+    public HasAdminClient(HasConfig hasConfig, File confDir) {
+        this.hasConfig = hasConfig;
+        this.confDir = confDir;
+    }
+
+    public File getConfDir() {
+        return confDir;
+    }
+
+    public HasConfig getHasConfig() {
+        return hasConfig;
+    }
+
+    protected HttpURLConnection getHttpsConnection(URL url, boolean isSpnego) throws Exception {
+        HasConfig conf = new HasConfig();
+
+        conf.setString(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
+        String sslClientConf = hasConfig.getSslClientConf();
+        conf.setString(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConf);
+        conf.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, false);
+
+        URLConnectionFactory connectionFactory = URLConnectionFactory
+                .newDefaultURLConnectionFactory(conf);
+        return (HttpURLConnection) connectionFactory.openConnection(url, isSpnego, hasConfig);
+    }
+
+    private WebResource getWebResource(String restName) {
+        Client client;
+        String server = null;
+        if ((hasConfig.getHttpsPort() != null) && (hasConfig.getHttpsHost() != null)) {
+            server = "https://" + hasConfig.getHttpsHost() + ":" + hasConfig.getHttpsPort()
+                    + "/has/v1/" + restName;
+            LOG.info("Admin request url: " + server);
+            HasConfig conf = new HasConfig();
+            try {
+                conf.addIniConfig(new File(hasConfig.getSslClientConf()));
+            } catch (IOException e) {
+                throw new RuntimeException("Errors occurred when adding ssl conf. "
+                    + e.getMessage());
+            }
+            SslConfigurator sslConfigurator = SslConfigurator.newInstance()
+                    .trustStoreFile(conf.getString("ssl.client.truststore.location"))
+                    .trustStorePassword(conf.getString("ssl.client.truststore.password"));
+            sslConfigurator.securityProtocol("SSL");
+            SSLContext sslContext = sslConfigurator.createSSLContext();
+            ClientConfig clientConfig = new DefaultClientConfig();
+            clientConfig.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES,
+                    new HTTPSProperties(new HostnameVerifier() {
+                        @Override
+                        public boolean verify(String s, SSLSession sslSession) {
+                            return false;
+                        }
+                    }, sslContext));
+            client = Client.create(clientConfig);
+        } else {
+            client = Client.create();
+        }
+        if (server == null) {
+            throw new RuntimeException("Please set the https address and port.");
+        }
+        return client.resource(server);
+    }
+
+    /**
+     * Convert a principals JSON string to a List.
+     *
+     * @param princs principals JSON string, which looks like
+     *               "["HTTP\/host1@HADOOP.COM","HTTP\/host2@HADOOP.COM"]"
+     * @return the list of principal names.
+     */
+    private List<String> getPrincsList(String princs) {
+        List<String> principalLists = new ArrayList<>();
+        try {
+            JSONArray principals = new JSONArray(princs);
+            for (int i = 0; i < principals.length(); i++) {
+                principalLists.add("\t" + principals.getString(i));
+            }
+        } catch (Exception e) {
+            System.err.println("Errors occurred when getting the principals."
+                + e.getMessage());
+        }
+        return principalLists;
+    }
+
+    public void requestCreatePrincipals(String hostRoles) throws HasException {
+        WebResource webResource = getWebResource("admin/createprincipals");
+        String response = webResource.entity(hostRoles.getBytes()).put(String.class);
+        try {
+            System.out.println(new JSONObject(response).getString("msg"));
+        } catch (JSONException e) {
+            throw new HasException(e);
+        }
+    }
+
+    @Override
+    public void addPrincipal(String principal) throws HasException {
+        WebResource webResource = getWebResource("admin/addprincipal");
+
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", principal);
+        String response = webResource.queryParams(params).post(String.class);
+        try {
+            System.out.println(new JSONObject(response).getString("msg"));
+        } catch (JSONException e) {
+            System.err.println("Errors occurred when getting the message from response."
+                + e.getMessage());
+        }
+    }
+
+    @Override
+    public File getKeytabByHostAndRole(String host, String role) throws HasException {
+        WebResource webResource = getWebResource("admin/exportkeytabs");
+
+        String keytabName = host + ".zip";
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("host", host);
+        if (!role.equals("")) {
+            params.add("role", role);
+            keytabName = role + "-" + host + ".keytab";
+        }
+        ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
+        if (response.getStatus() != 200) {
+            System.err.println("Error : connection denied.");
+            return null;
+        }
+        FileOutputStream fos = null;
+        try {
+            fos = new FileOutputStream(new File(keytabName));
+        } catch (FileNotFoundException e) {
+            System.err.println("Failed to create the keytab file " + keytabName + ": " + e.getMessage());
+            return null;
+        }
+        InputStream in = response.getEntityInputStream();
+        byte[] buffer = new byte[4 * 1024];
+        int read;
+        try {
+            while ((read = in.read(buffer)) > 0) {
+                fos.write(buffer, 0, read);
+            }
+            fos.close();
+            in.close();
+        } catch (IOException e) {
+            System.err.println("Errors occurred when reading the buffer to write keytab file."
+                + e.getMessage());
+        }
+        System.out.println("Accept keytab file \"" + keytabName + "\" from server.");
+        return new File(keytabName);
+    }
+
+    @Override
+    public void addPrincipal(String principal, String password) throws HasException {
+        WebResource webResource = getWebResource("admin/addprincipal");
+
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", principal);
+        params.add("password", password);
+        String response = webResource.queryParams(params).post(String.class);
+        try {
+            System.out.println(new JSONObject(response).getString("msg"));
+        } catch (JSONException e) {
+            System.err.println("Errors occurred when getting the message from response."
+                + e.getMessage());
+        }
+    }
+
+    @Override
+    public void deletePrincipal(String principal) throws HasException {
+        WebResource webResource = getWebResource("admin/deleteprincipal");
+
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", principal);
+        String response = webResource.queryParams(params).delete(String.class);
+        try {
+            System.out.println(new JSONObject(response).getString("msg"));
+        } catch (JSONException e) {
+            System.err.println("Errors occurred when getting the message from response."
+                + e.getMessage());
+        }
+    }
+
+    @Override
+    public void renamePrincipal(String oldPrincipal, String newPrincipal) throws HasException {
+        WebResource webResource = getWebResource("admin/renameprincipal");
+
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("oldprincipal", oldPrincipal);
+        params.add("newprincipal", newPrincipal);
+        String response = webResource.queryParams(params).post(String.class);
+        try {
+            System.out.println(new JSONObject(response).getString("msg"));
+        } catch (JSONException e) {
+            System.err.println(e.getMessage());
+        }
+    }
+
+    @Override
+    public List<String> getPrincipals() throws HasException {
+        WebResource webResource = getWebResource("admin/getprincipals");
+
+        String response = webResource.get(String.class);
+        String princs = null;
+        try {
+            princs = new JSONObject(response).getString("msg");
+        } catch (JSONException e) {
+            System.err.println("Errors occurred when getting the message from response."
+                + e.getMessage());
+        }
+        return getPrincsList(princs);
+    }
+
+    @Override
+    public List<String> getPrincipals(String exp) throws HasException {
+        WebResource webResource = getWebResource("admin/getprincipals");
+
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("exp", exp);
+        String response = webResource.queryParams(params).get(String.class);
+        return getPrincsList(response);
+    }
+
+    /**
+     * Create an HTTP connection to the HAS server.
+     *
+     * @param url the URL to connect to
+     * @param method the HTTP method, e.g. GET, POST or PUT
+     * @return the opened connection
+     * @throws IOException if the connection cannot be opened
+     */
+    protected HttpURLConnection createConnection(URL url, String method) throws IOException {
+        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+        conn.setRequestMethod(method);
+        if (method.equals("POST") || method.equals("PUT")) {
+            conn.setDoOutput(true);
+        }
+        return conn;
+    }
+
+    @Override
+    public String addPrincByRole(String host, String role) throws HasException {
+        //TODO
+        return null;
+    }
+
+    @Override
+    public String getHadminPrincipal() {
+        return KrbUtil.makeKadminPrincipal(hasConfig.getRealm()).getName();
+    }
+
+    /**
+     * Get the number of principals.
+     */
+    @Override
+    public int size() throws HasException {
+        return this.getPrincipals().size();
+    }
+
+    public String getKrb5conf() {
+        WebResource webResource = getWebResource("getkrb5conf");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            return response.getEntity(String.class);
+        }
+        return null;
+    }
+
+    public String getHasconf() {
+        WebResource webResource = getWebResource("gethasconf");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            return response.getEntity(String.class);
+        }
+        return null;
+    }
+    public void setPlugin(String plugin) {
+        WebResource webResource = getWebResource("conf/setplugin");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("plugin", plugin);
+        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            System.out.println(response.getEntity(String.class));
+        } else if (response.getStatus() == 400) {
+            System.err.println(response.getEntity(String.class));
+        }
+    }
+    public void configKdc(String port, String realm, String host) {
+        WebResource webResource = getWebResource("conf/configkdc");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("port", port);
+        params.add("realm", realm);
+        params.add("host", host);
+        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            System.out.println(response.getEntity(String.class));
+        } else if (response.getStatus() == 400) {
+            System.err.println(response.getEntity(String.class));
+        }
+    }
+    public void configKdcBackend(String backendType, String dir, String url, String user,
+                                 String password) {
+        WebResource webResource = getWebResource("conf/configkdcbackend");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("backendType", backendType);
+        if (backendType.equals("json")) {
+            params.add("dir", dir);
+        } else if (backendType.equals("mysql")) {
+            params.add("url", url);
+            params.add("user", user);
+            params.add("password", password);
+        }
+        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            System.out.println(response.getEntity(String.class));
+        } else if (response.getStatus() == 400) {
+            System.err.println(response.getEntity(String.class));
+        }
+    }
+    public void startKdc() {
+        WebResource webResource = getWebResource("kdcstart");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        try {
+            JSONObject result = new JSONObject(response.getEntity(String.class));
+            if (result.getString("result").equals("success")) {
+                System.out.println(result.getString("msg"));
+            } else {
+                System.err.println(result.getString("msg"));
+            }
+        } catch (JSONException e) {
+            System.err.println(e.getMessage());
+        }
+    }
+    public InputStream initKdc() {
+        WebResource webResource = getWebResource("kdcinit");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            return response.getEntityInputStream();
+        }
+        return null;
+    }
+    public String getHostRoles() {
+        WebResource webResource = getWebResource("hostroles");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            return response.getEntity(String.class);
+        }
+        return null;
+    }
+    public void setEnableOfConf(String isEnable) throws HasException {
+        WebResource webResource = getWebResource("admin/setconf");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("isEnable", isEnable);
+        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            System.out.println(response.getEntity(String.class));
+        } else {
+            System.err.println(response.getEntity(String.class));
+        }
+    }
+
+    @Override
+    public void exportKeytab(File keytab, String principal) throws HasException {
+        WebResource webResource = getWebResource("admin/exportkeytab");
+
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", principal);
+        ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
+        FileOutputStream fos;
+        try {
+            fos = new FileOutputStream(keytab);
+        } catch (FileNotFoundException e) {
+            throw new HasException("The keytab file: " + keytab + "not exist. " + e);
+        }
+        InputStream in = response.getEntityInputStream();
+        byte[] buffer = new byte[4 * 1024];
+        int read;
+        try {
+            while ((read = in.read(buffer)) > 0) {
+                fos.write(buffer, 0, read);
+            }
+            fos.close();
+            in.close();
+        } catch (IOException e) {
+            System.err.println("Errors occurred when writing the buffer to the keytab file. " + e.toString());
+            return;
+        }
+        System.out.println("Received keytab file \"" + keytab.getName() + "\" from the server successfully.");
+    }
+
+    @Override
+    public void exportKeytab(File keytabFile, List<String> principals) throws HasException {
+        WebResource webResource = getWebResource("admin/exportkeytab");
+        for (String principal: principals) {
+            MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+            params.add("principal", principal);
+            ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
+            FileOutputStream fos;
+            try {
+                fos = new FileOutputStream(keytabFile);
+            } catch (FileNotFoundException e) {
+                throw new HasException("The keytab file: " + keytabFile.getName() + "not exist. " + e);
+            }
+            InputStream in = response.getEntityInputStream();
+            byte[] buffer = new byte[4 * 1024];
+            int read;
+            try {
+                while ((read = in.read(buffer)) > 0) {
+                    fos.write(buffer, 0, read);
+                }
+                fos.close();
+                in.close();
+            } catch (IOException e) {
+                LOG.error("Errors occurred when writing the buffer to keytab file." + e.toString());
+            }
+        }
+        System.out.println("Accept keytab file \"" + keytabFile.getName() + "\" from server successfully.");
+    }
+}
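
A minimal sketch of driving the admin endpoints above from client code, assuming a
hadmin.conf that points at the HAS server (file paths and principal names here are
illustrative only):

    import org.apache.kerby.has.client.HasAdminClient;
    import org.apache.kerby.has.common.HasConfig;
    import org.apache.kerby.has.common.HasException;
    import org.apache.kerby.has.common.util.HasUtil;

    import java.io.File;

    public class HasAdminSketch {
        public static void main(String[] args) throws HasException {
            // Load the client-side config the tools in this patch read from hadmin.conf.
            HasConfig hasConfig = HasUtil.getHasConfig(new File("conf", "hadmin.conf"));
            HasAdminClient admin = new HasAdminClient(hasConfig, new File("conf"));

            // Fetch the server-generated krb5.conf and has-client.conf;
            // both return null if the request did not come back with HTTP 200.
            String krb5 = admin.getKrb5conf();
            String hasConf = admin.getHasconf();
            System.out.println(krb5 != null && hasConf != null
                ? "Fetched krb5.conf and has-client.conf." : "Request failed.");

            // Export a keytab for one principal to a local file.
            admin.exportKeytab(new File("alice.keytab"), "alice@HADOOP.COM");
        }
    }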

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/kerby/has/client/HasAuthAdminClient.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/kerby/has/client/HasAuthAdminClient.java b/has/has-client/src/main/java/org/apache/kerby/has/client/HasAuthAdminClient.java
new file mode 100644
index 0000000..d8523f0
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/kerby/has/client/HasAuthAdminClient.java
@@ -0,0 +1,553 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.client;
+
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasException;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.ProtocolException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+
+public class HasAuthAdminClient extends HasAdminClient {
+    public static final Logger LOG = LoggerFactory.getLogger(HasAuthAdminClient.class);
+
+    /**
+     * Create an instance of the HasAuthAdminClient.
+     *
+     * @param hasConfig the has config
+     */
+    public HasAuthAdminClient(HasConfig hasConfig) {
+        super(hasConfig);
+    }
+
+    /**
+     * Create an authenticated connection to the HAS server.
+     * <p>
+     * It uses Hadoop-auth client authentication which by default supports
+     * Kerberos HTTP SPNEGO, Pseudo/Simple and anonymous.
+     *
+     * @param url    the URL to open an HTTP connection to.
+     * @param method the HTTP method for the HTTP connection.
+     * @return an authenticated connection to the HAS server.
+     */
+    @Override
+    protected HttpURLConnection createConnection(URL url, String method) {
+        HttpURLConnection conn = null;
+        if ((getHasConfig().getHttpsPort() != null) && (getHasConfig().getHttpsHost() != null)) {
+            try {
+                conn = super.getHttpsConnection(url, true);
+            } catch (Exception e) {
+                System.err.println(e.getMessage());
+            }
+        }
+        if (conn == null) {
+            throw new RuntimeException("Failed to create an https connection. "
+                + "Please set the https host and port.");
+        }
+        if (method.equals("POST") || method.equals("PUT")) {
+            conn.setDoOutput(true);
+        }
+        return conn;
+    }
+
+    private String getBaseURL() {
+        String url = null;
+        if ((getHasConfig().getHttpsPort() != null) && (getHasConfig().getHttpsHost() != null)) {
+            url = "https://" + getHasConfig().getHttpsHost() + ":" + getHasConfig().getHttpsPort()
+                + "/has/v1/admin/";
+        }
+        if (url == null) {
+            throw new RuntimeException("Please set the https address and port.");
+        }
+        return url;
+    }
+
+    public void addPrincipal(String principal) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "addprincipal?principal=" + principal);
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "POST");
+
+        httpConn.setRequestProperty("Content-Type",
+            "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("POST");
+        } catch (ProtocolException e) {
+            LOG.error("Fail to add principal. " + e);
+            throw new HasException(e);
+        }
+        try {
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() == 200) {
+                System.out.println(getResponse(httpConn));
+            } else {
+                throw new HasException("Fail to add principal \"" + principal + "\".");
+            }
+        } catch (Exception e) {
+            LOG.error("Fail to add principal. " + e);
+            throw new HasException(e);
+        }
+    }
+
+    public void setEnableOfConf(String isEnable) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "setconf?isEnable=" + isEnable);
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "PUT");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("PUT");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        try {
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+            InputStream inputStream = httpConn.getResponseCode() == 200
+                    ? httpConn.getInputStream() : httpConn.getErrorStream();
+            BufferedReader reader = new BufferedReader(
+                    new InputStreamReader(inputStream));
+            String s;
+            StringBuilder result = new StringBuilder();
+            while ((s = reader.readLine()) != null) {
+                result.append(s);
+            }
+            if (httpConn.getResponseCode() == 200) {
+                System.out.println(result);
+            } else {
+                System.err.println(result);
+            }
+        } catch (Exception e) {
+            LOG.error("Fail to connect to server. " + e);
+            throw new HasException(e);
+        }
+    }
+
+    /**
+     * Convert a principals JSON string to a list.
+     *
+     * @param princs principals JSON string, such as
+     *               "["HTTP\/host1@HADOOP.COM","HTTP\/host2@HADOOP.COM"]"
+     * @return the list of principals.
+     */
+    private List<String> getPrincsList(String princs) {
+        List<String> principalLists = new ArrayList<>();
+        try {
+            JSONArray principals = new JSONArray(princs);
+            for (int i = 0; i < principals.length(); i++) {
+                principalLists.add("\t" + principals.getString(i));
+            }
+        } catch (Exception e) {
+            System.err.println(e.getMessage());
+        }
+        return principalLists;
+    }
+
+    @Override
+    public void requestCreatePrincipals(String hostRoles) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "createprincipals");
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "POST");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("PUT");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        httpConn.setDoOutput(true);
+        httpConn.setDoInput(true);
+        try {
+            httpConn.connect();
+            OutputStream out = httpConn.getOutputStream();
+            out.write(hostRoles.getBytes());
+            out.flush();
+            out.close();
+            if (httpConn.getResponseCode() == 200) {
+                System.out.println(getResponse(httpConn));
+            } else {
+                throw new HasException("Connection deined.");
+            }
+        } catch (Exception e) {
+            throw new HasException(e);
+        }
+    }
+
+    @Override
+    public File getKeytabByHostAndRole(String host, String role) throws HasException {
+        String keytabName = host + ".zip";
+        HttpURLConnection httpConn;
+        String request = getBaseURL() + "exportkeytabs?host=" + host;
+        if (!role.equals("")) {
+            request = request + "&role=" + role;
+            keytabName = role + "-" + host + ".keytab";
+        }
+
+        URL url;
+        try {
+            url = new URL(request);
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "GET");
+
+        httpConn.setRequestProperty("Content-Type",
+            "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("GET");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        httpConn.setDoOutput(true);
+        httpConn.setDoInput(true);
+        try {
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() != 200) {
+                System.err.println("Error : connection denied.");
+                return null;
+            }
+            FileOutputStream fos = new FileOutputStream(new File(keytabName));
+            InputStream in = httpConn.getInputStream();
+            byte[] buffer = new byte[4 * 1024];
+            int read;
+            while ((read = in.read(buffer)) > 0) {
+                fos.write(buffer, 0, read);
+            }
+            fos.close();
+            in.close();
+        } catch (IOException e) {
+            throw new HasException(e);
+        }
+        System.out.println("Accept keytab file \"" + keytabName + "\" from server.");
+
+        return new File(keytabName);
+    }
+
+    @Override
+    public void exportKeytab(File keytab, String principal) throws HasException {
+        URL url = null;
+        try {
+            url = new URL(getBaseURL() + "exportkeytab?principal=" + principal);
+        } catch (MalformedURLException e) {
+            LOG.error("Fail to get url. " + e);
+            throw new HasException("Fail to get url.", e);
+        }
+
+        HttpURLConnection httpConn = createConnection(url, "GET");
+        httpConn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("GET");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        httpConn.setDoOutput(true);
+        httpConn.setDoInput(true);
+        try {
+            httpConn.connect();
+            if (httpConn.getResponseCode() != 200) {
+                System.err.println("Error: connection denied.");
+            }
+            FileOutputStream fos = new FileOutputStream(keytab);
+            InputStream in = httpConn.getInputStream();
+            byte[] buffer = new byte[3 * 1024];
+            int read;
+            while ((read = in.read(buffer)) > 0) {
+                fos.write(buffer, 0, read);
+            }
+            fos.close();
+            in.close();
+        } catch (IOException e) {
+            throw new HasException(e);
+        }
+        System.out.println("Receive keytab file \"" + keytab.getName() + "\" from server successfully.");
+    }
+
+    @Override
+    public void exportKeytab(File keytabFile, List<String> principals) throws HasException {
+        HttpURLConnection httpConn;
+        for (String principal: principals) {
+            String request = getBaseURL() + "exportkeytab?principal=" + principal;
+            URL url;
+            try {
+                url = new URL(request);
+            } catch (MalformedURLException e) {
+                throw new HasException(e);
+            }
+            httpConn = createConnection(url, "GET");
+            httpConn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
+            try {
+                httpConn.setRequestMethod("GET");
+            } catch (ProtocolException e) {
+                throw new HasException(e);
+            }
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            try {
+                httpConn.connect();
+                if (httpConn.getResponseCode() != 200) {
+                    System.err.println("Error: connection denied.");
+                }
+                FileOutputStream fos = new FileOutputStream(keytabFile);
+                InputStream in = httpConn.getInputStream();
+                byte[] buffer = new byte[4 * 1024];
+                int read;
+                while ((read = in.read(buffer)) > 0) {
+                    fos.write(buffer, 0, read);
+                }
+                fos.close();
+                in.close();
+            } catch (IOException e) {
+                throw new HasException(e);
+            }
+        }
+        System.out.println("Accept keytab file \"" + keytabFile.getName() + "\" from server.");
+    }
+
+    @Override
+    public void addPrincipal(String principal, String password) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url = null;
+        try {
+            url = new URL(getBaseURL() + "addprincipal?principal=" + principal
+                            + "&password=" + password);
+        } catch (MalformedURLException e) {
+            throw new HasException("Fail to get url.", e);
+        }
+
+        httpConn = createConnection(url, "POST");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("POST");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        try {
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() == 200) {
+                System.out.println(getResponse(httpConn));
+            } else {
+                throw new HasException("Fail to add principal \"" + principal + "\".");
+            }
+        } catch (Exception e) {
+            throw new HasException(e);
+        }
+    }
+
+    @Override
+    public void deletePrincipal(String principal) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "deleteprincipal?principal=" + principal);
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "DELETE");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("DELETE");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        try {
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() == 200) {
+                System.out.println(getResponse(httpConn));
+            } else {
+                throw new HasException("Connection deined.");
+            }
+        } catch (Exception e) {
+            throw new HasException(e);
+        }
+    }
+
+    @Override
+    public void renamePrincipal(String oldPrincipal, String newPrincipal) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "renameprincipal?oldprincipal=" + oldPrincipal
+                            + "&newprincipal=" + newPrincipal);
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "POST");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("POST");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        try {
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() == 200) {
+                System.out.println(getResponse(httpConn));
+            } else {
+                throw new HasException("Connection to renameprincipal deined.");
+            }
+        } catch (Exception e) {
+            throw new HasException(e);
+        }
+    }
+
+    @Override
+    public List<String> getPrincipals() throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "getprincipals");
+        } catch (MalformedURLException e) {
+            System.err.println(e.getMessage());
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "GET");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("GET");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        String response;
+        try {
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() == 200) {
+                response = getResponse(httpConn);
+            } else {
+                throw new HasException("Connection to getprincipals deined.");
+            }
+        } catch (Exception e) {
+            LOG.error("Fail to get principals." + e);
+            throw new HasException("Fail to get principals.", e);
+        }
+        return getPrincsList(response);
+    }
+
+    @Override
+    public List<String> getPrincipals(String exp) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "getprincipals?exp=" + exp);
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "GET");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("GET");
+        } catch (ProtocolException e) {
+            LOG.error("Fail to get the principals with expression. " + e);
+            throw new HasException("Fail to get the principals with expression.", e);
+        }
+        String response;
+        try {
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() == 200) {
+                response = getResponse(httpConn);
+            } else {
+                throw new HasException("Connection to getprincipals deined.");
+            }
+        } catch (Exception e) {
+            throw new HasException(e);
+        }
+        return getPrincsList(response);
+    }
+
+    private String getResponse(HttpURLConnection httpConn) throws Exception {
+        StringBuilder data = new StringBuilder();
+        BufferedReader br = new BufferedReader(new InputStreamReader(httpConn.getInputStream()));
+        String s;
+        while ((s = br.readLine()) != null) {
+            data.append(s);
+        }
+        return new JSONObject(data.toString()).getString("msg");
+    }
+}
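
A minimal sketch of the authenticated variant, assuming hadmin.conf carries the https
host and port that getBaseURL() requires and that SPNEGO credentials are available
(principal names and passwords here are illustrative only):

    import org.apache.kerby.has.client.HasAuthAdminClient;
    import org.apache.kerby.has.common.HasConfig;
    import org.apache.kerby.has.common.HasException;
    import org.apache.kerby.has.common.util.HasUtil;

    import java.io.File;
    import java.util.List;

    public class HasAuthAdminSketch {
        public static void main(String[] args) throws HasException {
            HasConfig hasConfig = HasUtil.getHasConfig(new File("conf", "hadmin.conf"));
            HasAuthAdminClient admin = new HasAuthAdminClient(hasConfig);

            // Add a principal with a password, export its keytab, then list principals.
            admin.addPrincipal("alice@HADOOP.COM", "mypassword");
            admin.exportKeytab(new File("alice.keytab"), "alice@HADOOP.COM");

            List<String> principals = admin.getPrincipals();
            for (String p : principals) {
                System.out.println(p);
            }
        }
    }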


[02/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hclient/HasClientLoginTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hclient/HasClientLoginTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hclient/HasClientLoginTool.java
new file mode 100644
index 0000000..f423a3b
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hclient/HasClientLoginTool.java
@@ -0,0 +1,269 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.hclient;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.has.client.HasAuthAdminClient;
+import org.apache.kerby.has.client.HasClient;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.util.HasJaasLoginUtil;
+import org.apache.kerby.has.common.util.HasUtil;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.server.KdcConfig;
+import org.apache.kerby.kerberos.kerb.server.KdcUtil;
+import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;
+import org.apache.kerby.util.OSUtil;
+
+import javax.security.auth.Subject;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+public class HasClientLoginTool {
+    private static List<String> principalList = new ArrayList<String>();
+    private static List<File>  keytabList = new ArrayList<File>();
+
+    private static final String KEYTAB_USAGE = (OSUtil.isWindows()
+        ? "Usage: bin\\k=login-test.cmd" : "Usage: sh bin/login-test.sh")
+        + " [add|run|delete] [conf_dir] [work_dir] [number]\n"
+        + "\n";
+
+    private static final String TGT_USAGE = (OSUtil.isWindows()
+        ? "Usage: bin\\k=login-test.cmd" : "Usage: sh bin/login-test.sh")
+        + " tgt [conf_dir]\n"
+        + "\n";
+
+    private static void printKeytabUsage(String error) {
+        System.err.println(error + "\n");
+        System.err.println(KEYTAB_USAGE);
+        System.exit(-1);
+    }
+
+    private static void printTgtUsage(String error) {
+        System.err.println(error + "\n");
+        System.err.println(TGT_USAGE);
+        System.exit(-1);
+    }
+
+    public static class Task implements Runnable {
+        private int index;
+
+        Task(int index) {
+            this.index = index;
+        }
+
+        @Override
+        public void run() {
+            Subject subject = null;
+            try {
+                subject = HasJaasLoginUtil.loginUsingKeytab(principalList.get(index),
+                    keytabList.get(index));
+            } catch (IOException e) {
+                System.err.println("Fail to login using keytab. " + e);
+            }
+            System.out.println("Login succeeded for user: "
+                + subject.getPrincipals().iterator().next());
+        }
+    }
+
+    public static void main(String[] args) {
+
+        if (args.length == 0) {
+            printKeytabUsage("Need a cmd: add, run, delete or tgt.");
+            return;
+        }
+        String cmd = args[0];
+        File confDir;
+        File workDir;
+
+        if (cmd.equals("tgt")) {
+            if (args.length != 2) {
+                printTgtUsage("Need 2 args.");
+                return;
+            }
+
+            confDir = new File(args[1]);
+            if (!confDir.exists()) {
+                printTgtUsage("Need the valid conf dir.");
+                return;
+            }
+            File confFile = new File(confDir, "hadmin.conf");
+            HasConfig hasConfig;
+            try {
+                hasConfig = HasUtil.getHasConfig(confFile);
+            } catch (HasException e) {
+                System.err.println(e.getMessage());
+                return;
+            }
+            if (hasConfig == null) {
+                System.err.println("hadmin.conf not exist in " + confDir.getAbsolutePath());
+                return;
+            }
+            String host = hasConfig.getHttpsHost();
+            String port = hasConfig.getHttpsPort();
+
+            HasClient hasClient = new HasClient();
+            TgtTicket tgtTicket;
+            try {
+                tgtTicket = hasClient.requestTgt();
+            } catch (HasException e) {
+                System.err.println("Errors occurred when getting TGT. " + e.getMessage());
+                return;
+            }
+
+            System.out.println("Get the tgt ticket successfully!");
+            System.out.println("The client principal of tgt ticket: " + tgtTicket.getClientPrincipal());
+
+            Subject subject = null;
+            try {
+                subject = HasJaasLoginUtil.loginUserFromTgtTicket(
+                    "https://" + host + ":" + port + "/has/v1?auth_type=RAM");
+            } catch (IOException e) {
+                System.err.println("Errors occurred when login user with TGT. " + e.getMessage());
+                return;
+            }
+
+            System.out.println("Principal: " + subject.getPrincipals().iterator().next());
+        } else {
+            if (args.length != 4) {
+                printKeytabUsage("Need 4 args.");
+                return;
+            }
+
+            confDir = new File(args[1]);
+            workDir = new File(args[2]);
+
+            if (!confDir.exists()) {
+                printKeytabUsage("Need the valid conf dir.");
+                return;
+            }
+            if (!workDir.exists()) {
+                printKeytabUsage("Need the valid work dir.");
+                return;
+            }
+
+            int taskNum = Integer.parseInt(args[3]);
+
+            System.out.println("The task num is: " + taskNum);
+
+            if (taskNum <= 0) {
+                printKeytabUsage("The task num must be greater than zero");
+                System.exit(-1);
+            }
+
+            HasAdminClient hasAdminClient;
+            HasAuthAdminClient authHasAdminClient = null;
+            File confFile = new File(confDir, "hadmin.conf");
+            HasConfig hasConfig = null;
+            try {
+                hasConfig = HasUtil.getHasConfig(confFile);
+            } catch (HasException e) {
+                System.err.println(e.getMessage());
+                return;
+            }
+
+            if (hasConfig == null) {
+                System.err.println("hadmin.conf not exist in " + confDir.getAbsolutePath());
+                return;
+            }
+
+            if (hasConfig.getFilterAuthType().equals("kerberos")) {
+                authHasAdminClient = new HasAuthAdminClient(hasConfig);
+            }
+            if (authHasAdminClient != null) {
+                hasAdminClient = authHasAdminClient;
+            } else {
+                hasAdminClient = new HasAdminClient(hasConfig);
+            }
+            String realm = null;
+            try {
+                KdcConfig kdcConfig = KdcUtil.getKdcConfig(confDir);
+                realm = kdcConfig.getKdcRealm();
+            } catch (KrbException e) {
+                printKeytabUsage(e.getMessage());
+            }
+
+            if (cmd.equals("add")) {
+                for (int i = 0; i < taskNum; i++) {
+                    String principal = "test" + i + "@" + realm;
+                    try {
+                        hasAdminClient.addPrincipal(principal);
+                    } catch (HasException e) {
+                        System.err.println("Errors occurred when adding principal. "
+                            + e.getMessage());
+                        return;
+                    }
+                    File keytabFile = new File(workDir, i + ".keytab");
+                    try {
+                        hasAdminClient.exportKeytab(keytabFile, principal);
+                    } catch (HasException e) {
+                        System.err.println("Errors occurred when exporting the keytabs. "
+                            + e.getMessage());
+                        return;
+                    }
+                    System.out.println("Add principals and keytabs successfully.");
+                }
+            } else if (cmd.equals("run")) {
+                ExecutorService exec;
+                for (int i = 0; i < taskNum; i++) {
+                    String principal = "test" + i + "@" + realm;
+                    principalList.add(i, principal);
+                    File file = new File(workDir, i + ".keytab");
+                    keytabList.add(i, file);
+                }
+                System.out.println("Start the login test.");
+                Long startTime = System.currentTimeMillis();
+                exec = Executors.newFixedThreadPool(5);
+                for (int i = 0; i < taskNum; ++i) {
+                    exec.submit(new Task(i));
+                }
+                exec.shutdown();
+                try {
+                    exec.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
+                } catch (InterruptedException e) {
+                    System.err.println(e.getMessage());
+                    return;
+                }
+                Long endTime = System.currentTimeMillis();
+                System.out.println("Finish the login test.");
+                System.out.println("Cost time: " + (endTime - startTime) + "ms");
+            } else if (cmd.equals("delete")) {
+                for (int i = 0; i < taskNum; i++) {
+                    String principal = "test" + i + "@" + realm;
+                    try {
+                        hasAdminClient.deletePrincipal(principal);
+                    } catch (HasException e) {
+                        System.err.println("Errors occurred when deleting the principal. "
+                            + e.getMessage());
+                        continue;
+                    }
+                    File file = new File(workDir, i + ".keytab");
+                    if (!file.delete()) {
+                        System.err.println("Failed to delete " + i + ".keytab.");
+                    }
+                }
+                System.out.println("Delete principals and keytabs successfully.");
+            } else {
+                printKeytabUsage("Need the cmd with add, run or delete.");
+            }
+        }
+    }
+}
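
The per-thread login in Task boils down to one HasJaasLoginUtil call; a minimal
standalone sketch (the principal and keytab path are illustrative only):

    import org.apache.kerby.has.common.util.HasJaasLoginUtil;

    import javax.security.auth.Subject;
    import java.io.File;
    import java.io.IOException;

    public class KeytabLoginSketch {
        public static void main(String[] args) {
            try {
                // Same call the Task above issues for each principal/keytab pair.
                Subject subject = HasJaasLoginUtil.loginUsingKeytab(
                    "test0@HADOOP.COM", new File("work", "0.keytab"));
                System.out.println("Login succeeded for user: "
                    + subject.getPrincipals().iterator().next());
            } catch (IOException e) {
                System.err.println("Failed to log in using keytab. " + e);
            }
        }
    }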

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/HasInitTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/HasInitTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/HasInitTool.java
new file mode 100644
index 0000000..1171d02
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/HasInitTool.java
@@ -0,0 +1,132 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.kdcinit;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.util.HasUtil;
+import org.apache.kerby.has.tool.client.kdcinit.cmd.HasConfKdcBackendCmd;
+import org.apache.kerby.has.tool.client.kdcinit.cmd.HasConfKdcCmd;
+import org.apache.kerby.has.tool.client.kdcinit.cmd.HasGetHasconfCmd;
+import org.apache.kerby.has.tool.client.kdcinit.cmd.HasGetKrb5confCmd;
+import org.apache.kerby.has.tool.client.kdcinit.cmd.HasInitKdcCmd;
+import org.apache.kerby.has.tool.client.kdcinit.cmd.HasSetPluginCmd;
+import org.apache.kerby.has.tool.client.kdcinit.cmd.HasStartKdcCmd;
+import org.apache.kerby.has.tool.client.kdcinit.cmd.KdcInitCmd;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.util.OSUtil;
+
+import java.io.File;
+import java.util.Scanner;
+
+public class HasInitTool {
+    private static final String PROMPT = HasInitTool.class.getSimpleName();
+    private static final String USAGE = (OSUtil.isWindows()
+            ? "Usage: bin\\hadmin.cmd" : "Usage: sh bin/kdcinit.sh")
+            + " <conf-file>\n"
+            + "\tExample:\n"
+            + "\t\t"
+            + (OSUtil.isWindows()
+            ? "bin\\kdcinit.cmd" : "sh bin/kdcinit.sh")
+            + " conf\n";
+
+    private static final String LEGAL_COMMANDS = "Available commands are: "
+            + "\n"
+            + "get_krb5conf, getkrb5\n"
+            + "                         Get krb5.conf\n"
+            + "get_hasConf, gethas\n"
+            + "                         Get has-client.conf\n"
+            + "set_plugin, setplugin\n"
+            + "                         Set plugin\n"
+            + "config_kdcBackend, confbackend\n"
+            + "                         Config kdc backend\n"
+            + "config_kdc, confkdc\n"
+            + "                         Config kdc\n"
+            + "start_kdc, start\n"
+            + "                         Start kdc\n"
+            + "init_kdc, init\n"
+            + "                         Init kdc\n";
+
+    public static void main(String[] args) {
+        if (args.length < 1) {
+            System.err.println(USAGE);
+            System.exit(1);
+        }
+        String confDirPath = args[0];
+        File confFile = new File(confDirPath, "hadmin.conf");
+        HasConfig hasConfig;
+        try {
+            hasConfig = HasUtil.getHasConfig(confFile);
+        } catch (HasException e) {
+            System.err.println(e.getMessage());
+            return;
+        }
+
+        System.out.println(LEGAL_COMMANDS);
+        System.out.println("enter \"<cmd> [?][-help]\" to get cmd help.");
+        Scanner scanner = new Scanner(System.in, "UTF-8");
+        System.out.print(PROMPT + ": ");
+        String input = scanner.nextLine();
+
+        HasAdminClient hadmin = new HasAdminClient(hasConfig, new File(confDirPath));
+        while (!(input.equals("quit") || input.equals("exit") || input.equals("q"))) {
+            try {
+                execute(hadmin, input);
+            } catch (KrbException e) {
+                System.err.println(e.getMessage());
+            }
+            System.out.print(PROMPT + ": ");
+            input = scanner.nextLine();
+        }
+    }
+
+    private static void execute(HasAdminClient hadmin, String input) throws KrbException {
+        input = input.trim();
+        if (input.startsWith("cmd")) {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+        String[] items = input.split("\\s+");
+        String cmd = items[0];
+
+        KdcInitCmd executor;
+        if (cmd.equals("get_krb5conf")
+                || cmd.equals("getkrb5")) {
+            executor = new HasGetKrb5confCmd(hadmin);
+        } else if (cmd.equals("get_hasConf")
+                || cmd.equals("gethas")) {
+            executor = new HasGetHasconfCmd(hadmin);
+        } else if (cmd.equals("set_plugin")
+                || cmd.equals("setplugin")) {
+            executor = new HasSetPluginCmd(hadmin);
+        } else if (cmd.equals("config_kdcBackend")
+                || cmd.equals("confbackend")) {
+            executor = new HasConfKdcBackendCmd(hadmin);
+        } else if (cmd.equals("config_kdc")
+                || cmd.equals("confkdc")) {
+            executor = new HasConfKdcCmd(hadmin);
+        } else if (cmd.equals("start_kdc")
+                || cmd.equals("start")) {
+            executor = new HasStartKdcCmd(hadmin);
+        } else if (cmd.equals("init_kdc")
+                || cmd.equals("init")) {
+            executor = new HasInitKdcCmd(hadmin);
+        } else {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+        executor.execute(items);
+    }
+
+}
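
The interactive commands above map onto plain HasAdminClient calls; a minimal
non-interactive sketch of the same init sequence (the plugin name, backend directory
and KDC settings are illustrative only):

    import org.apache.kerby.has.client.HasAdminClient;
    import org.apache.kerby.has.common.HasConfig;
    import org.apache.kerby.has.common.HasException;
    import org.apache.kerby.has.common.util.HasUtil;

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class KdcInitSketch {
        public static void main(String[] args) throws HasException, IOException {
            HasConfig hasConfig = HasUtil.getHasConfig(new File("conf", "hadmin.conf"));
            HasAdminClient hadmin = new HasAdminClient(hasConfig, new File("conf"));

            // set_plugin, config_kdcBackend, config_kdc, start_kdc, init_kdc in order.
            hadmin.setPlugin("RAM");
            hadmin.configKdcBackend("json", "/tmp/has/jsonbackend", null, null, null);
            hadmin.configKdc("88", "HADOOP.COM", "localhost");
            hadmin.startKdc();

            InputStream in = hadmin.initKdc();
            if (in == null) {
                System.err.println("Failed to init kdc.");
                return;
            }
            // Save the returned admin.keytab locally, as HasInitKdcCmd does.
            try (FileOutputStream fos = new FileOutputStream(new File("conf", "admin.keytab"))) {
                byte[] buffer = new byte[4 * 1024];
                int read;
                while ((read = in.read(buffer)) > 0) {
                    fos.write(buffer, 0, read);
                }
            }
            in.close();
        }
    }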

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasConfKdcBackendCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasConfKdcBackendCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasConfKdcBackendCmd.java
new file mode 100644
index 0000000..7423cbf
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasConfKdcBackendCmd.java
@@ -0,0 +1,66 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.kdcinit.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+/**
+ * Remote config kdc cmd
+ */
+public class HasConfKdcBackendCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: config_kdcBackend <backendType> [dir] [url] [user]"
+        + " [password]\n"
+        + "\tSupported backendType : json,mysql\n"
+        + "\tExample:\n"
+        + "\t\tconfig_kdcBackend json /tmp/has/jsonbackend \n"
+        + "\t\tconfig_kdcBackend mysql jdbc:mysql://127.0.0.1:3306/mysqlbackend root passwd\n";
+
+    public HasConfKdcBackendCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        }
+        if (items.length < 3) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient = getHadmin();
+        if (items.length >= 3 && items[1].equals("json")) {
+            hasAdminClient.configKdcBackend(items[1], items[2],
+                    null, null, null);
+        } else if (items.length >= 5 && items[1].equals("mysql")) {
+            hasAdminClient.configKdcBackend(items[1], null,
+                    items[2], items[3], items[4]);
+        } else {
+            System.err.println(USAGE);
+            return;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasConfKdcCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasConfKdcCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasConfKdcCmd.java
new file mode 100644
index 0000000..ce73dce
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasConfKdcCmd.java
@@ -0,0 +1,54 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.kdcinit.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+/**
+ * Remote config kdc cmd
+ */
+public class HasConfKdcCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: config_kdc <host> <port> <realm>\n"
+        + "\tExample:\n"
+        + "\t\tconfig_kdc localhost 88 HADOOP.COM\n";
+
+    public HasConfKdcCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        }
+        if (items.length < 4) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient = getHadmin();
+        hasAdminClient.configKdc(items[2], items[3], items[1]);
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasGetHasconfCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasGetHasconfCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasGetHasconfCmd.java
new file mode 100644
index 0000000..efa92f6
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasGetHasconfCmd.java
@@ -0,0 +1,77 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.kdcinit.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.PrintStream;
+
+/**
+ * Remote get has-client.conf cmd
+ */
+public class HasGetHasconfCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: get_hasConf [-p] [path]\n"
+        + "\tExample:\n"
+        + "\t\tget_hasConf\n";
+
+    public HasGetHasconfCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        }
+        File path = getHadmin().getConfDir();
+        if (items.length >= 3 && items[1].startsWith("-p")) {
+            path = new File(items[2]);
+            if (!path.exists()) {
+                if (!path.mkdirs()) {
+                    System.err.println("Cannot create file : " + items[2]);
+                    return;
+                }
+            }
+        }
+        File hasConf = new File(path, "has-client.conf");
+
+        HasAdminClient hasAdminClient = getHadmin();
+        String content = hasAdminClient.getHasconf();
+        if (content == null) {
+            System.err.println("Failed to get has.conf.");
+            return;
+        }
+        try {
+            PrintStream ps = new PrintStream(new FileOutputStream(hasConf));
+            ps.println(content);
+            ps.close();
+            System.out.println("has-client.conf has been saved to: " + hasConf.getAbsolutePath());
+        } catch (FileNotFoundException e) {
+            System.err.println(e.getMessage());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasGetKrb5confCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasGetKrb5confCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasGetKrb5confCmd.java
new file mode 100644
index 0000000..bbe93cf
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasGetKrb5confCmd.java
@@ -0,0 +1,77 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.kdcinit.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.PrintStream;
+
+/**
+ * Remote get krb5.conf cmd
+ */
+public class HasGetKrb5confCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: get_krb5conf [-p] [path]\n"
+        + "\tExample:\n"
+        + "\t\tget_krb5conf -p /tmp/has\n";
+
+    public HasGetKrb5confCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        }
+        File path = getHadmin().getConfDir();
+        if (items.length >= 3 && items[1].startsWith("-p")) {
+            path = new File(items[2]);
+            if (!path.exists()) {
+                if (!path.mkdirs()) {
+                    System.err.println("Cannot create file : " + items[2]);
+                    return;
+                }
+            }
+        }
+        File krb5Conf = new File(path, "krb5.conf");
+
+        HasAdminClient hasAdminClient = getHadmin();
+        String content = hasAdminClient.getKrb5conf();
+        if (content == null) {
+            System.err.println("Failed to get krb5.conf.");
+            return;
+        }
+        try {
+            PrintStream ps = new PrintStream(new FileOutputStream(krb5Conf));
+            ps.println(content);
+            ps.close();
+            System.out.println("krb5.conf has been saved to: " + krb5Conf.getAbsolutePath());
+        } catch (FileNotFoundException e) {
+            System.err.println(e.getMessage());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasInitKdcCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasInitKdcCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasInitKdcCmd.java
new file mode 100644
index 0000000..895b10a
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasInitKdcCmd.java
@@ -0,0 +1,94 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.kdcinit.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * Remote init kdc cmd
+ */
+public class HasInitKdcCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: init_kdc [-p] [path]\n"
+        + "\tExample:\n"
+        + "\t\tinit_kdc\n";
+
+    public HasInitKdcCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        }
+        File path = getHadmin().getConfDir();
+        if (items.length >= 3 && items[1].startsWith("-p")) {
+            path = new File(items[2]);
+            if (!path.exists()) {
+                if (!path.mkdirs()) {
+                    System.err.println("Cannot create file : " + items[2]);
+                    return;
+                }
+            }
+        }
+        File hadminKeytab = new File(path, "admin.keytab");
+
+        HasAdminClient hasAdminClient = getHadmin();
+        InputStream content = hasAdminClient.initKdc();
+
+        if (content == null) {
+            System.err.println("Failed to init kdc.");
+            return;
+        }
+
+        FileOutputStream fos = null;
+        try {
+            fos = new FileOutputStream(hadminKeytab);
+        } catch (FileNotFoundException e) {
+            System.err.println("the admin keytab file not found. " + e.getMessage());
+        }
+        byte[] buffer = new byte[4 * 1024];
+        int read;
+        try {
+            while ((read = content.read(buffer)) > 0) {
+                fos.write(buffer, 0, read);
+            }
+            fos.close();
+            content.close();
+        } catch (IOException e) {
+            System.err.println("Errors occurred when getting the admin.keytab. " + e.getMessage());
+        }
+
+        System.out.println("admin.keytab has saved in : " + hadminKeytab.getAbsolutePath()
+            + ",\nplease safely save it to use hadmin.");
+
+    }
+}

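A side note on HasInitKdcCmd.execute() above: if the FileOutputStream constructor throws, fos stays null and the copy loop would fail with a NullPointerException, and neither stream is closed on error. Below is a minimal try-with-resources sketch of the same keytab copy, for illustration only; the class name KeytabCopySketch is hypothetical.

    // Sketch only: a null-safe variant of the keytab copy in HasInitKdcCmd.
    // try-with-resources closes both streams and avoids using a null fos.
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;

    public final class KeytabCopySketch {
        static void save(InputStream content, File hadminKeytab) {
            try (InputStream in = content;
                 OutputStream out = new FileOutputStream(hadminKeytab)) {
                byte[] buffer = new byte[4 * 1024];
                int read;
                while ((read = in.read(buffer)) > 0) {
                    out.write(buffer, 0, read);
                }
                System.out.println("admin.keytab has been saved to: "
                    + hadminKeytab.getAbsolutePath());
            } catch (IOException e) {
                System.err.println("Errors occurred while saving the admin.keytab. "
                    + e.getMessage());
            }
        }
    }
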
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasSetPluginCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasSetPluginCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasSetPluginCmd.java
new file mode 100644
index 0000000..a06230b
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasSetPluginCmd.java
@@ -0,0 +1,53 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.kdcinit.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+/**
+ * Remote set plugin cmd
+ */
+public class HasSetPluginCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: set_plugin <plugin>\n"
+        + "\tExample:\n"
+        + "\t\tset_plugin RAM\n";
+
+    public HasSetPluginCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        } else {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient = getHadmin();
+        hasAdminClient.setPlugin(items[1]);
+    }
+}

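For illustration, the kdcinit commands can also be driven programmatically. A minimal sketch, assuming a HasAdminClient obtained the same way the HAS client tool obtains one (the class name SetPluginSketch is hypothetical); it is equivalent to typing "set_plugin RAM" in the shell.

    // Sketch only: invoking the set_plugin command through its class.
    import org.apache.kerby.has.client.HasAdminClient;
    import org.apache.kerby.has.tool.client.kdcinit.cmd.HasSetPluginCmd;
    import org.apache.kerby.kerberos.kerb.KrbException;

    public final class SetPluginSketch {
        static void setRamPlugin(HasAdminClient hadmin) throws KrbException {
            // Same effect as entering "set_plugin RAM" in the kdcinit shell.
            new HasSetPluginCmd(hadmin).execute(new String[]{"set_plugin", "RAM"});
        }
    }
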
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasStartKdcCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasStartKdcCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasStartKdcCmd.java
new file mode 100644
index 0000000..466cee7
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/HasStartKdcCmd.java
@@ -0,0 +1,49 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.kdcinit.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+/**
+ * Remote start kdc cmd
+ */
+public class HasStartKdcCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: start_kdc\n"
+        + "\tExample:\n"
+        + "\t\tstart\n";
+
+    public HasStartKdcCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        }
+        HasAdminClient hasAdminClient = getHadmin();
+        hasAdminClient.startKdc();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/KdcInitCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/KdcInitCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/KdcInitCmd.java
new file mode 100644
index 0000000..310cfa3
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kdcinit/cmd/KdcInitCmd.java
@@ -0,0 +1,42 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.kdcinit.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+public abstract class KdcInitCmd {
+
+    private HasAdminClient hadmin;
+
+    public KdcInitCmd(HasAdminClient hadmin) {
+        this.hadmin = hadmin;
+    }
+
+    protected HasAdminClient getHadmin() {
+        return hadmin;
+    }
+
+    /**
+     * Execute the kdc init cmd.
+     * @param input Input cmd to execute
+     */
+    public abstract void execute(String[] input) throws KrbException;
+}

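KdcInitCmd is the small template the commands above share: it holds a HasAdminClient and requires execute(String[]). Below is a hedged sketch of how a shell loop might dispatch input onto these subclasses; it is not the actual kdcinit shell (which is outside this excerpt), and KdcInitDispatchSketch is a hypothetical name.

    // Sketch only: mapping kdcinit shell input to the KdcInitCmd subclasses
    // introduced in this commit, to illustrate the execute(String[]) contract.
    import org.apache.kerby.has.client.HasAdminClient;
    import org.apache.kerby.has.tool.client.kdcinit.cmd.HasInitKdcCmd;
    import org.apache.kerby.has.tool.client.kdcinit.cmd.HasSetPluginCmd;
    import org.apache.kerby.has.tool.client.kdcinit.cmd.HasStartKdcCmd;
    import org.apache.kerby.has.tool.client.kdcinit.cmd.KdcInitCmd;
    import org.apache.kerby.kerberos.kerb.KrbException;

    public final class KdcInitDispatchSketch {
        static void dispatch(HasAdminClient hadmin, String input) throws KrbException {
            String[] items = input.trim().split("\\s+");
            KdcInitCmd executor;
            if (items[0].startsWith("set_plugin")) {
                executor = new HasSetPluginCmd(hadmin);
            } else if (items[0].startsWith("init_kdc")) {
                executor = new HasInitKdcCmd(hadmin);
            } else if (items[0].startsWith("start_kdc")) {
                executor = new HasStartKdcCmd(hadmin);
            } else {
                System.err.println("Unknown command: " + items[0]);
                return;
            }
            executor.execute(items);
        }
    }
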
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kinit/KinitOption.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kinit/KinitOption.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kinit/KinitOption.java
new file mode 100644
index 0000000..f96fa7c
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kinit/KinitOption.java
@@ -0,0 +1,88 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License. 
+ *
+ */
+package org.apache.kerby.has.tool.client.kinit;
+
+import org.apache.kerby.KOption;
+import org.apache.kerby.KOptionInfo;
+import org.apache.kerby.KOptionType;
+import org.apache.kerby.kerberos.kerb.client.KrbOptionGroup;
+
+public enum KinitOption implements KOption {
+    NONE(null),
+
+    CLIENT_PRINCIPAL(new KOptionInfo("client-principal", "Client principal",
+        KrbOptionGroup.KRB, KOptionType.STR)),
+    LIFE_TIME(new KOptionInfo("-l", "lifetime",
+        KrbOptionGroup.KRB, KOptionType.DURATION)),
+    START_TIME(new KOptionInfo("-s", "start time",
+        KrbOptionGroup.KRB, KOptionType.DURATION)),
+    RENEWABLE_LIFE(new KOptionInfo("-r", "renewable lifetime",
+        KrbOptionGroup.KRB, KOptionType.DURATION)),
+    FORWARDABLE(new KOptionInfo("-f", "forwardable",
+        KrbOptionGroup.KDC_FLAGS)),
+    NOT_FORWARDABLE(new KOptionInfo("-F", "not forwardable",
+        KrbOptionGroup.KDC_FLAGS)),
+    PROXIABLE(new KOptionInfo("-p", "proxiable",
+        KrbOptionGroup.KDC_FLAGS)),
+    NOT_PROXIABLE(new KOptionInfo("-P", "not proxiable",
+        KrbOptionGroup.KDC_FLAGS)),
+    RENEW(new KOptionInfo("-R", "renew",
+        KrbOptionGroup.KDC_FLAGS)),
+    USE_PASSWD(new KOptionInfo("using-password", "using password",
+        KrbOptionGroup.KRB)),
+    USER_PASSWD(new KOptionInfo("user-passwd", "User plain password",
+        KrbOptionGroup.KRB)),
+    USE_KEYTAB(new KOptionInfo("-k", "use keytab",
+        KrbOptionGroup.KRB)),
+    USE_DFT_KEYTAB(new KOptionInfo("-i", "use default client keytab (with -k)",
+        KrbOptionGroup.KRB)),
+    KEYTAB_FILE(new KOptionInfo("-t", "filename of keytab to use",
+        KrbOptionGroup.KRB, KOptionType.FILE)),
+    KRB5_CACHE(new KOptionInfo("-c", "Kerberos 5 cache name",
+        KrbOptionGroup.KRB, KOptionType.STR)),
+    SERVICE(new KOptionInfo("-S", "service",
+        KrbOptionGroup.KRB, KOptionType.STR)),
+
+    CONF_DIR(new KOptionInfo("-conf", "conf dir", KrbOptionGroup.KRB, KOptionType.DIR));
+
+    private final KOptionInfo optionInfo;
+
+    KinitOption(KOptionInfo optionInfo) {
+        this.optionInfo = optionInfo;
+    }
+
+    @Override
+    public KOptionInfo getOptionInfo() {
+        return optionInfo;
+    }
+
+    public static KinitOption fromName(String name) {
+        if (name != null) {
+            for (KinitOption ko : values()) {
+                if (ko.optionInfo != null
+                        && ko.optionInfo.getName().equals(name)) {
+                    return ko;
+                }
+            }
+        }
+        return NONE;
+    }
+}
+

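KinitOption mirrors the MIT kinit flags, backing each with a KOptionInfo. Below is a minimal sketch of the fromName() lookup, reusing the same KOptionType.NOV check that KinitTool.main() applies further down to decide whether a flag expects a value; KinitOptionLookupSketch is a hypothetical name.

    // Sketch only: resolving command-line flags with KinitOption.fromName().
    import org.apache.kerby.KOptionType;
    import org.apache.kerby.has.tool.client.kinit.KinitOption;

    public final class KinitOptionLookupSketch {
        public static void main(String[] args) {
            for (String flag : new String[]{"-l", "-k", "-c", "-bogus"}) {
                KinitOption kto = KinitOption.fromName(flag);
                if (kto == KinitOption.NONE) {
                    // Unknown flags resolve to NONE and can be rejected.
                    System.out.println(flag + " -> invalid option");
                } else {
                    boolean needsValue = kto.getOptionInfo().getType() != KOptionType.NOV;
                    System.out.println(flag + " -> " + kto
                        + (needsValue ? " (expects a value)" : ""));
                }
            }
        }
    }
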
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kinit/KinitTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kinit/KinitTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kinit/KinitTool.java
new file mode 100644
index 0000000..f95fe91
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/kinit/KinitTool.java
@@ -0,0 +1,384 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License. 
+ *
+ */
+package org.apache.kerby.has.tool.client.kinit;
+
+import org.apache.kerby.KOption;
+import org.apache.kerby.KOptionGroup;
+import org.apache.kerby.KOptionInfo;
+import org.apache.kerby.KOptionType;
+import org.apache.kerby.KOptions;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.client.KrbClient;
+import org.apache.kerby.kerberos.kerb.client.KrbKdcOption;
+import org.apache.kerby.kerberos.kerb.client.KrbOption;
+import org.apache.kerby.kerberos.kerb.client.KrbOptionGroup;
+import org.apache.kerby.kerberos.kerb.client.PkinitOption;
+import org.apache.kerby.kerberos.kerb.client.TokenOption;
+import org.apache.kerby.kerberos.kerb.type.ticket.SgtTicket;
+import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;
+import org.apache.kerby.util.OSUtil;
+import org.apache.kerby.util.SysUtil;
+
+import java.io.Console;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.Scanner;
+
+/**
+ * kinit-like tool
+ *
+ * Ref. MIT kinit command tool usage.
+ */
+public class KinitTool {
+
+    private static final String USAGE = (OSUtil.isWindows()
+            ? "Usage: bin\\kinit.cmd" : "Usage: sh bin/kinit.sh")
+            + " <-conf conf_dir> [-V] [-l lifetime] [-s start_time]\n"
+            + "\t\t[-r renewable_life] [-f | -F] [-p | -P] -n [-a | -A] [-C] [-E]\n"
+            + "\t\t[-v] [-R] [-k [-i|-t keytab_file]] [-c cachename]\n"
+            + "\t\t[-S service_name] [-T ticket_armor_cache]\n"
+            + "\t\t[-X <attribute>[=<value>]] <principal>\n\n"
+            + "\tDESCRIPTION:\n"
+            + "\t\tkinit obtains and caches an initial ticket-granting ticket for principal.\n\n"
+            + "\tOPTIONS:\n"
+            + "\t\t-V verbose\n"
+            + "\t\t-l lifetime\n"
+            + "\t\t-s start time\n"
+            + "\t\t-r renewable lifetime\n"
+            + "\t\t-f forwardable\n"
+            + "\t\t-F not forwardable\n"
+            + "\t\t-p proxiable\n"
+            + "\t\t-P not proxiable\n"
+            + "\t\t-n anonymous\n"
+            + "\t\t-a include addresses\n"
+            + "\t\t-A do not include addresses\n"
+            + "\t\t-v validate\n"
+            + "\t\t-R renew\n"
+            + "\t\t-C canonicalize\n"
+            + "\t\t-E client is enterprise principal name\n"
+            + "\t\t-k use keytab\n"
+            + "\t\t-i use default client keytab (with -k)\n"
+            + "\t\t-t filename of keytab to use\n"
+            + "\t\t-c Kerberos 5 cache name\n"
+            + "\t\t-S service\n"
+            + "\t\t-T armor credential cache\n"
+            + "\t\t-X <attribute>[=<value>]\n"
+            + "\n";
+
+    private static void printUsage(String error) {
+        System.err.println(error + "\n");
+        System.err.println(USAGE);
+        System.exit(-1);
+    }
+
+    private static final String KVNO_USAGE = (OSUtil.isWindows()
+        ? "Usage: bin\\kinit.cmd" : "Usage: sh bin/kinit.sh")
+        + " <-conf conf_dir> <-c cachename> <-S service_name>\n\n"
+        + "\tDESCRIPTION:\n"
+        + "\t\tkinit obtains a service ticket for the specified principal and prints out the key version number.\n"
+        + "\n";
+
+    private static void printKvnoUsage(String error) {
+        System.err.println(error + "\n");
+        System.err.println(KVNO_USAGE);
+        System.exit(-1);
+    }
+
+    /**
+     * Get password for the input principal from console
+     */
+    private static String getPassword(String principal) {
+        Console console = System.console();
+        if (console == null) {
+            System.out.println("Couldn't get Console instance, "
+                    + "maybe you're running this from within an IDE. "
+                    + "Use scanner to read password.");
+            System.out.println("Password for " + principal + ":");
+            try (Scanner scanner = new Scanner(System.in, "UTF-8")) {
+                return scanner.nextLine().trim();
+            }
+        }
+        console.printf("Password for " + principal + ":");
+        char[] passwordChars = console.readPassword();
+        String password = new String(passwordChars).trim();
+        Arrays.fill(passwordChars, ' ');
+
+        return password;
+    }
+
+    private static void requestTicket(String principal, KOptions ktOptions) {
+        ktOptions.add(KinitOption.CLIENT_PRINCIPAL, principal);
+
+        File confDir = null;
+        if (ktOptions.contains(KinitOption.CONF_DIR)) {
+            confDir = ktOptions.getDirOption(KinitOption.CONF_DIR);
+        }
+
+        KrbClient krbClient = null;
+        try {
+            krbClient = getClient(confDir);
+        } catch (KrbException e) {
+            System.err.println("Create krbClient failed: " + e.getMessage());
+            System.exit(1);
+        }
+
+        if (ktOptions.contains(KinitOption.RENEW)) {
+            if (ktOptions.contains(KinitOption.KRB5_CACHE)) {
+                String ccName = ktOptions.getStringOption(KinitOption.KRB5_CACHE);
+                File ccFile = new File(ccName);
+
+                SgtTicket sgtTicket = null;
+                try {
+                    sgtTicket = krbClient.requestSgt(ccFile, null);
+                } catch (KrbException e) {
+                    System.err.println("kinit: " + e.getKrbErrorCode().getMessage());
+                }
+
+                try {
+                    krbClient.renewTicket(sgtTicket, ccFile);
+                } catch (KrbException e) {
+                    System.err.println("kinit: " + e.getKrbErrorCode().getMessage());
+                }
+
+                System.out.println("Successfully renewed.");
+            }
+            return;
+        }
+
+        if (ktOptions.contains(KinitOption.SERVICE) && ktOptions.contains(KinitOption.KRB5_CACHE)) {
+            String ccName = ktOptions.getStringOption(KinitOption.KRB5_CACHE);
+            File ccFile = new File(ccName);
+            if (ccFile.exists()) {
+                System.out.println("Use credential cache to request a service ticket.");
+                String servicePrincipal = ktOptions.getStringOption(KinitOption.SERVICE);
+                SgtTicket sgtTicket = null;
+                try {
+                    sgtTicket = krbClient.requestSgt(ccFile, servicePrincipal);
+                } catch (KrbException e) {
+                    System.err.println("Kinit: get service ticket failed: " + e.getMessage());
+                    System.exit(1);
+                }
+
+                try {
+                    krbClient.storeTicket(sgtTicket, ccFile);
+                } catch (KrbException e) {
+                    System.err.println("Kinit: store ticket failed: " + e.getMessage());
+                    System.exit(1);
+                }
+
+                System.out.println(sgtTicket.getEncKdcRepPart().getSname().getName() + ": kvno = "
+                    + sgtTicket.getTicket().getEncryptedEncPart().getKvno());
+                return;
+            }
+        }
+
+        if (!ktOptions.contains(KinitOption.USE_KEYTAB)) {
+            // If not requesting tickets by keytab, fall back to password.
+            ktOptions.add(KinitOption.USE_PASSWD);
+            String password = getPassword(principal);
+            ktOptions.add(KinitOption.USER_PASSWD, password);
+        }
+
+        TgtTicket tgt = null;
+        try {
+            tgt = krbClient.requestTgt(convertOptions(ktOptions));
+        } catch (KrbException e) {
+            System.err.println("Authentication failed: " + e.getMessage());
+            System.exit(1);
+        }
+
+        File ccacheFile;
+        if (ktOptions.contains(KinitOption.KRB5_CACHE)) {
+            String ccacheName = ktOptions.getStringOption(KinitOption.KRB5_CACHE);
+            ccacheFile = new File(ccacheName);
+        } else {
+            String ccacheName = getCcacheName(krbClient);
+            ccacheFile = new File(ccacheName);
+        }
+
+        try {
+            krbClient.storeTicket(tgt, ccacheFile);
+        } catch (KrbException e) {
+            System.err.println("Store ticket failed: " + e.getMessage());
+            System.exit(1);
+        }
+
+        System.out.println("Successfully requested and stored ticket in "
+            + ccacheFile.getAbsolutePath());
+
+        if (ktOptions.contains(KinitOption.SERVICE)) {
+            System.out.println("Use tgt to request a service ticket.");
+            String servicePrincipal = ktOptions.getStringOption(KinitOption.SERVICE);
+            SgtTicket sgtTicket;
+            try {
+                sgtTicket = krbClient.requestSgt(tgt, servicePrincipal);
+            } catch (KrbException e) {
+                System.err.println("kinit: " + e.getKrbErrorCode().getMessage());
+                return;
+            }
+
+            System.out.println(sgtTicket.getEncKdcRepPart().getSname().getName() + ": kvno = "
+                + sgtTicket.getTicket().getEncryptedEncPart().getKvno());
+        }
+    }
+
+    /**
+     * Init the client.
+     */
+    private static KrbClient getClient(File confDir) throws KrbException {
+        KrbClient krbClient;
+
+        if (confDir != null) {
+            krbClient = new KrbClient(confDir);
+        } else {
+            krbClient = new KrbClient();
+        }
+
+        krbClient.init();
+        return krbClient;
+    }
+
+    /**
+     * Get credential cache file name if not specified.
+     */
+    private static String getCcacheName(KrbClient krbClient) {
+        final String ccacheNameEnv = System.getenv("KRB5CCNAME");
+        final String ccacheNameConf = krbClient.getSetting().getKrbConfig().getString("default_ccache_name");
+        String ccacheName;
+        if (ccacheNameEnv != null) {
+            ccacheName = ccacheNameEnv;
+        } else if (ccacheNameConf != null) {
+            ccacheName = ccacheNameConf;
+        } else {
+            StringBuilder uid = new StringBuilder();
+            try {
+                //Get UID through "id -u" command
+                String command = "id -u";
+                Process child = Runtime.getRuntime().exec(command);
+                InputStream in = child.getInputStream();
+                int c;
+                while ((c = in.read()) != -1) {
+                    uid.append((char) c);
+                }
+                in.close();
+            } catch (IOException e) {
+                System.err.println("Failed to get UID.");
+                System.exit(1);
+            }
+            ccacheName = "krb5cc_" + uid.toString().trim();
+            ccacheName = SysUtil.getTempDir().toString() + "/" + ccacheName;
+        }
+
+        return ccacheName;
+    }
+
+    public static void main(String[] args) {
+        KOptions ktOptions = new KOptions();
+        KinitOption kto;
+        String principal = null;
+
+        int i = 0;
+        String opt, param, error;
+        while (i < args.length) {
+            error = null;
+
+            opt = args[i++];
+            if (opt.startsWith("-")) {
+                kto = KinitOption.fromName(opt);
+                if (kto == KinitOption.NONE) {
+                    error = "Invalid option:" + opt;
+                    System.err.println(error);
+                    break;
+                }
+            } else {
+                principal = opt;
+                kto = KinitOption.NONE;
+            }
+
+            if (kto != KinitOption.NONE && kto.getOptionInfo().getType() != KOptionType.NOV) {
+                // require a parameter
+                param = null;
+                if (i < args.length) {
+                    param = args[i++];
+                }
+                if (param != null) {
+                    KOptions.parseSetValue(kto.getOptionInfo(), param);
+                } else {
+                    error = "Option " + opt + " require a parameter";
+                }
+            }
+
+            if (error != null) {
+                printUsage(error);
+            }
+            if (kto != KinitOption.NONE) {
+                ktOptions.add(kto);
+            }
+        }
+
+        if (!ktOptions.contains(KinitOption.CONF_DIR)) {
+            printUsage("No conf dir given.");
+        }
+
+        if (principal == null) {
+            if (!ktOptions.contains(KinitOption.SERVICE) && !ktOptions.contains(KinitOption.KRB5_CACHE)) {
+                printUsage("No principal is specified");
+            } else if (ktOptions.contains(KinitOption.SERVICE) && !ktOptions.contains(KinitOption.KRB5_CACHE)) {
+                printKvnoUsage("No credential cache file given.");
+            }
+        }
+
+        requestTicket(principal, ktOptions);
+        System.exit(0);
+    }
+
+    /**
+     * Convert kinit tool options to KOptions.
+     * @param toolOptions
+     * @return KOptions
+     */
+    static KOptions convertOptions(KOptions toolOptions) {
+        KOptions results = new KOptions();
+
+        for (KOption toolOpt : toolOptions.getOptions()) {
+            KOptionInfo kOptionInfo = toolOpt.getOptionInfo();
+            KOptionGroup group = kOptionInfo.getGroup();
+            KOption kOpt = null;
+
+            if (group == KrbOptionGroup.KRB) {
+                kOpt = KrbOption.fromOptionName(kOptionInfo.getName());
+            } else if (group == KrbOptionGroup.PKINIT) {
+                kOpt = PkinitOption.fromOptionName(kOptionInfo.getName());
+            } else if (group == KrbOptionGroup.TOKEN) {
+                kOpt = TokenOption.fromOptionName(kOptionInfo.getName());
+            } else if (group == KrbOptionGroup.KDC_FLAGS) {
+                kOpt = KrbKdcOption.fromOptionName(kOptionInfo.getName());
+            }
+            if (kOpt != null && kOpt.getOptionInfo() != KrbOption.NONE.getOptionInfo()) {
+                kOpt.getOptionInfo().setValue(toolOpt.getOptionInfo().getValue());
+                results.add(kOpt);
+            }
+        }
+
+        return results;
+    }
+}

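KinitTool feeds the parsed options into KrbClient.requestTgt()/requestSgt() and stores the result in a credential cache. The following is a usage sketch only: the conf directory and principal are placeholders, and the call prompts for a password and terminates the JVM via System.exit().

    // Sketch only: driving KinitTool through its main method.
    import org.apache.kerby.has.tool.client.kinit.KinitTool;

    public final class KinitToolSketch {
        public static void main(String[] args) {
            // Equivalent to: sh bin/kinit.sh -conf /etc/has/conf alice
            // (paths and principal are hypothetical placeholders)
            KinitTool.main(new String[]{"-conf", "/etc/has/conf", "alice"});
        }
    }
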
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/klist/KlistOption.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/klist/KlistOption.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/klist/KlistOption.java
new file mode 100644
index 0000000..b43ddea
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/klist/KlistOption.java
@@ -0,0 +1,66 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.klist;
+
+import org.apache.kerby.KOption;
+import org.apache.kerby.KOptionInfo;
+import org.apache.kerby.KOptionType;
+
+public enum KlistOption implements KOption {
+    NONE(null),
+    CREDENTIALS_CACHE(new KOptionInfo("-c", "specifies path of credentials cache",
+        KOptionType.STR)),
+    KEYTAB(new KOptionInfo("-k", "specifies keytab")),
+    DEFAULT_CLIENT_KEYTAB(new KOptionInfo("-i", "uses default client keytab if no name given")),
+    LIST_CREDENTIAL_CACHES(new KOptionInfo("-l", "list credential caches in collection")),
+    ALL_CREDENTIAL_CACHES(new KOptionInfo("-A", "shows content of all credential caches")),
+    ENCRYPTION_TYPE(new KOptionInfo("-e", "shows encryption type")),
+    KERBEROS_VERSION(new KOptionInfo("-V", "shows Kerberos version")),
+    AUTHORIZATION_DATA_TYPE(new KOptionInfo("-d", "shows the submitted authorization data type")),
+    CREDENTIALS_FLAGS(new KOptionInfo("-f", "show credential flags")),
+    EXIT_TGT_EXISTENCE(new KOptionInfo("-s", "sets exit status based on valid tgt existence")),
+    DISPL_ADDRESS_LIST(new KOptionInfo("-a", "displays the address list")),
+    NO_REVERSE_RESOLVE(new KOptionInfo("-n", "do not reverse resolve")),
+    SHOW_KTAB_ENTRY_TS(new KOptionInfo("-t", "shows keytab entry timestamps")),
+    SHOW_KTAB_ENTRY_KEY(new KOptionInfo("-K", "show keytab entry keys"));
+
+    private final KOptionInfo optionInfo;
+
+    KlistOption(KOptionInfo optionInfo) {
+        this.optionInfo = optionInfo;
+    }
+
+    @Override
+    public KOptionInfo getOptionInfo() {
+        return optionInfo;
+    }
+
+    public static KlistOption fromName(String name) {
+        if (name != null) {
+            for (KlistOption ko : values()) {
+                if (ko.optionInfo != null
+                        && ko.optionInfo.getName().equals(name)) {
+                    return ko;
+                }
+            }
+        }
+        return NONE;
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/klist/KlistTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/klist/KlistTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/klist/KlistTool.java
new file mode 100644
index 0000000..64f3315
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/klist/KlistTool.java
@@ -0,0 +1,293 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.klist;
+
+import org.apache.kerby.KOptionType;
+import org.apache.kerby.KOptions;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.ccache.Credential;
+import org.apache.kerby.kerberos.kerb.ccache.CredentialCache;
+import org.apache.kerby.kerberos.kerb.client.KrbClient;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
+import org.apache.kerby.util.HexUtil;
+import org.apache.kerby.util.OSUtil;
+import org.apache.kerby.util.SysUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+
+/**
+ * klist-like tool
+ *
+ * Ref. MIT klist command tool usage.
+ */
+public class KlistTool {
+    private static final Logger LOG = LoggerFactory.getLogger(KlistTool.class);
+
+    private static final String USAGE = (OSUtil.isWindows()
+        ? "Usage: bin\\klist.cmd" : "Usage: sh bin/klist.sh")
+            + " [-e] [-V] [[-c] [-l] [-A] [-d] [-f] [-s] "
+            + "[-a [-n]]] [-k [-t] [-K]] [name]\n"
+            + "\t-c specifies credentials cache\n"
+            + "\t-k specifies keytab\n"
+            + "\t   (Default is credentials cache)\n"
+            + "\t-i uses default client keytab if no name given\n"
+            + "\t-l lists credential caches in collection\n"
+            + "\t-A shows content of all credential caches\n"
+            + "\t-e shows the encryption type\n"
+            + "\t-V shows the Kerberos version and exits\n"
+            + "\toptions for credential caches:\n"
+            + "\t\t-d shows the submitted authorization data types\n"
+            + "\t\t-f shows credentials flags\n"
+            + "\t\t-s sets exit status based on valid tgt existence\n"
+            + "\t\t-a displays the address list\n"
+            + "\t\t\t-n do not reverse-resolve\n"
+            + "\toptions for keytabs:\n"
+            + "\t\t-t shows keytab entry timestamps\n"
+            + "\t\t-K shows keytab entry keys\n";
+
+    // option "-k" hava a optional parameter, "/etc/krb5.keytab" if not specified
+    private static String keytabFilePath = null;
+
+    private static void printUsage(String error) {
+        System.err.println(error + "\n");
+        System.err.println(USAGE);
+        System.exit(-1);
+    }
+
+    private static int printCredentialCacheInfo(KOptions klOptions) {
+        CredentialCache cc = new CredentialCache();
+        List<Credential> credentials;
+        InputStream cis = null;
+        String fileName;
+
+        if (!klOptions.contains(KlistOption.CREDENTIALS_CACHE)) {
+            fileName = getCcacheName();
+        } else {
+            fileName = klOptions.getStringOption(KlistOption.CREDENTIALS_CACHE);
+        }
+        try {
+            cis = Files.newInputStream(Paths.get(fileName));
+            cc.load(cis);
+        } catch (IOException e) {
+            LOG.error("Failed to open CredentialCache from file: " + fileName + ". " + e.toString());
+        } finally {
+            try {
+                if (cis != null) {
+                    cis.close();
+                }
+            } catch (IOException e) {
+                LOG.warn("Fail to close input stream. " + e);
+            }
+        }
+
+        if (cc != null) {
+            credentials = cc.getCredentials();
+
+            System.out.println("Ticket cache: " + fileName);
+            System.out.println("Default principal: " + cc.getPrimaryPrincipal().getName());
+
+            if (credentials.isEmpty()) {
+                System.out.println("No credential has been cached.");
+            } else {
+                DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+
+                System.out.println("Valid starting\t\tExpires\t\t\tService principal");
+
+                for (Credential crd : credentials) {
+                    System.out.println(df.format(crd.getStartTime().getTime()) + "\t"
+                        + df.format(crd.getEndTime().getTime()) + "\t"
+                        + crd.getServerName() + "\n"
+                        + "\t" + "renew until" + "\t" + df.format(crd.getRenewTill().getTime()));
+                }
+            }
+        }
+
+        return 0;
+    }
+
+    /**
+     * Get credential cache file name if not specified.
+     */
+    private static String getCcacheName() {
+        String ccacheName;
+        String ccacheNameEnv = System.getenv("KRB5CCNAME");
+        String ccacheNameConf = null;
+        File confDir = new File("/etc");
+        try {
+            KrbClient krbClient = new KrbClient(confDir);
+            ccacheNameConf = krbClient.getSetting().getKrbConfig().getString("default_ccache_name");
+        } catch (KrbException e) {
+            System.err.println("Create krbClient failed: " + e.getMessage());
+            System.exit(1);
+        }
+        if (ccacheNameEnv != null) {
+            ccacheName = ccacheNameEnv;
+        } else if (ccacheNameConf != null) {
+            ccacheName = ccacheNameConf;
+        } else {
+            StringBuilder uid = new StringBuilder();
+            try {
+                //Get UID through "id -u" command
+                String command = "id -u";
+                Process child = Runtime.getRuntime().exec(command);
+                InputStream in = child.getInputStream();
+                int c;
+                while ((c = in.read()) != -1) {
+                    uid.append((char) c);
+                }
+                in.close();
+            } catch (IOException e) {
+                System.err.println("Failed to get UID.");
+                System.exit(1);
+            }
+            ccacheName = "krb5cc_" + uid.toString().trim();
+            ccacheName = SysUtil.getTempDir().toString() + "/" + ccacheName;
+        }
+
+        return ccacheName;
+    }
+
+    private static int printKeytabInfo(KOptions klOptions) {
+        String[] header = new String[4];
+        header[0] = "KVNO Principal\n"
+                + "---- --------------------------------------------------------------------------";
+        header[1] = header[0];
+        header[2] = "KVNO Timestamp           Principal\n"
+                + "---- ------------------- ------------------------------------------------------";
+        header[3] = header[2];
+        int outputIndex = 0;
+        if (klOptions.contains(KlistOption.SHOW_KTAB_ENTRY_TS)) {
+            outputIndex |= 2;
+        }
+        if (klOptions.contains(KlistOption.SHOW_KTAB_ENTRY_KEY)) {
+            outputIndex |= 1;
+        }
+        System.out.println("Keytab name: FILE:" + keytabFilePath);
+        try {
+            File keytabFile = new File(keytabFilePath);
+            if (!keytabFile.exists()) {
+                System.out.println("klist: Key table file '" + keytabFilePath + "' not found. ");
+                return 0;
+            }
+            System.out.println(header[outputIndex]);
+            SimpleDateFormat format = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss");
+            Keytab keytab = Keytab.loadKeytab(keytabFile);
+            List<PrincipalName> principals = keytab.getPrincipals();
+            for (PrincipalName principal : principals) {
+                List<KeytabEntry> keytabEntries = keytab.getKeytabEntries(principal);
+                for (KeytabEntry entry : keytabEntries) {
+                    StringBuilder sb = new StringBuilder();
+                    sb.append(String.format("%-4d ", entry.getKvno()));
+                    if ((outputIndex & 2) != 0) {
+                        Date date = new Date(entry.getTimestamp().getTime());
+                        sb.append(format.format(date));
+                        sb.append(' ');
+                    }
+                    sb.append(String.format("%s ", principal.getName()));
+                    if ((outputIndex & 1) != 0) {
+                        sb.append("(0x");
+                        sb.append(HexUtil.bytesToHex(entry.getKey().getKeyData()));
+                        sb.append(")");
+                    }
+                    System.out.println(sb);
+                }
+            }
+
+        } catch (IOException e) {
+            System.err.println("klist: Error while scan key table file '" + keytabFilePath + "'");
+        }
+        return 0;
+    }
+
+    private static int printInfo(KOptions klOptions) {
+        if (klOptions.contains(KlistOption.KEYTAB)) {
+            return printKeytabInfo(klOptions);
+        }
+        return printCredentialCacheInfo(klOptions);
+    }
+
+    public static void main(String[] args) throws Exception {
+        KOptions klOptions = new KOptions();
+        KlistOption klopt;
+        // String name = null;
+
+        int i = 0;
+        String opt, value, error;
+        while (i < args.length) {
+            error = null;
+            opt = args[i++];
+
+            if (opt.startsWith("-")) {
+                klopt = KlistOption.fromName(opt);
+                if (klopt == KlistOption.NONE) {
+                    error = "Invalid option:" + opt;
+                }
+            } else {
+                if (keytabFilePath == null && klOptions.contains(KlistOption.KEYTAB)) {
+                    keytabFilePath = opt;
+                }
+                break;
+            }
+
+            if (error == null && klopt.getOptionInfo().getType() != KOptionType.NOV) {
+                //needs value for this parameter
+                value = null;
+                if (i < args.length) {
+                    value = args[i++];
+                }
+                if (value != null) {
+                    KOptions.parseSetValue(klopt.getOptionInfo(), value);
+                } else {
+                    error = "Option" + klopt + "requires a following value";
+                }
+            }
+
+            if (error != null) {
+                printUsage(error);
+            }
+
+            klOptions.add(klopt);
+            if (klOptions.contains(KlistOption.KEYTAB)
+                && klOptions.contains(KlistOption.CREDENTIALS_CACHE)) {
+                error = "Can not use '-c' and '-k' at the same time ";
+                printUsage(error);
+            }
+        }
+
+        if (keytabFilePath == null) {
+            keytabFilePath = "/etc/krb5.keytab";
+        }
+
+        int errNo = KlistTool.printInfo(klOptions);
+        System.exit(errNo);
+    }
+}

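The keytab branch of KlistTool reduces to Kerby's Keytab API: load the file, walk its principals, and print each entry. Below is a condensed sketch using only the Keytab, KeytabEntry and PrincipalName calls already exercised above; the keytab path is a placeholder.

    // Sketch only: the essence of KlistTool's keytab listing.
    import org.apache.kerby.kerberos.kerb.keytab.Keytab;
    import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
    import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;

    import java.io.File;
    import java.io.IOException;

    public final class KeytabListSketch {
        public static void main(String[] args) throws IOException {
            File keytabFile = new File("/etc/krb5.keytab");  // placeholder path
            Keytab keytab = Keytab.loadKeytab(keytabFile);
            for (PrincipalName principal : keytab.getPrincipals()) {
                for (KeytabEntry entry : keytab.getKeytabEntries(principal)) {
                    System.out.println(entry.getKvno() + " " + principal.getName());
                }
            }
        }
    }
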
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/pom.xml b/has/has-tool/has-server-tool/pom.xml
index 426eacf..0f634a6 100644
--- a/has/has-tool/has-server-tool/pom.xml
+++ b/has/has-tool/has-server-tool/pom.xml
@@ -4,7 +4,7 @@
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <parent>
         <artifactId>has-tool</artifactId>
-        <groupId>org.apache.hadoop</groupId>
+        <groupId>org.apache.kerby</groupId>
         <version>1.0.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
@@ -13,12 +13,12 @@
 
     <dependencies>
       <dependency>
-        <groupId>org.apache.hadoop</groupId>
+        <groupId>org.apache.kerby</groupId>
         <artifactId>has-server</artifactId>
         <version>${project.version}</version>
       </dependency>
       <dependency>
-        <groupId>org.apache.hadoop</groupId>
+        <groupId>org.apache.kerby</groupId>
         <artifactId>has-plugins</artifactId>
         <version>${project.version}</version>
       </dependency>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/HadminLocalTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/HadminLocalTool.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/HadminLocalTool.java
deleted file mode 100644
index 647ad4e..0000000
--- a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/HadminLocalTool.java
+++ /dev/null
@@ -1,265 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.server.hadmin.local;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-import org.apache.hadoop.has.tool.server.hadmin.local.cmd.AddPrincipalCmd;
-import org.apache.hadoop.has.tool.server.hadmin.local.cmd.AddPrincipalsCmd;
-import org.apache.hadoop.has.tool.server.hadmin.local.cmd.DeletePrincipalCmd;
-import org.apache.hadoop.has.tool.server.hadmin.local.cmd.DisableConfigureCmd;
-import org.apache.hadoop.has.tool.server.hadmin.local.cmd.EnableConfigureCmd;
-import org.apache.hadoop.has.tool.server.hadmin.local.cmd.ExportKeytabsCmd;
-import org.apache.hadoop.has.tool.server.hadmin.local.cmd.GetHostRolesCmd;
-import org.apache.hadoop.has.tool.server.hadmin.local.cmd.GetPrincipalCmd;
-import org.apache.hadoop.has.tool.server.hadmin.local.cmd.HadminCmd;
-import org.apache.hadoop.has.tool.server.hadmin.local.cmd.KeytabAddCmd;
-import org.apache.hadoop.has.tool.server.hadmin.local.cmd.ListPrincipalsCmd;
-import org.apache.hadoop.has.tool.server.hadmin.local.cmd.RenamePrincipalCmd;
-import org.apache.kerby.KOptions;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.admin.kadmin.KadminOption;
-import org.apache.kerby.kerberos.tool.kadmin.AuthUtil;
-import org.apache.kerby.kerberos.tool.kadmin.ToolUtil;
-import org.apache.kerby.util.OSUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.security.auth.Subject;
-import javax.security.auth.kerberos.KerberosPrincipal;
-import javax.security.auth.login.LoginException;
-import java.io.File;
-import java.security.Principal;
-import java.util.Map;
-import java.util.Scanner;
-import java.util.Set;
-
-/**
- * Ref. MIT kadmin cmd tool usage.
- */
-public class HadminLocalTool {
-    private static final Logger LOG = LoggerFactory.getLogger(HadminLocalTool.class);
-    private static File confDir;
-
-    private static final String PROMPT = HadminLocalTool.class.getSimpleName() + ".local";
-    private static  final String USAGE = (OSUtil.isWindows()
-            ? "Usage: bin\\hadmin-local.cmd" : "Usage: sh bin/kadmin-local.sh")
-            + " <conf-dir> <-c cache_name>|<-k keytab>\n"
-            + "\tExample:\n"
-            + "\t\t"
-            + (OSUtil.isWindows()
-            ? "bin\\hadmin-local.cmd" : "sh bin/hadmin-local.sh")
-            + " conf -k admin.keytab\n";
-
-    private static void printUsage(String error) {
-        System.err.println(error + "\n");
-        System.err.println(USAGE);
-        System.exit(-1);
-    }
-
-    private static final String LEGAL_COMMANDS = "Available commands are: "
-        + "\n"
-        + "add_principal, addprinc\n"
-        + "                         Add principal\n"
-        + "delete_principal, delprinc\n"
-        + "                         Delete principal\n"
-        + "rename_principal, renprinc\n"
-        + "                         Rename principal\n"
-        + "get_principal, getprinc\n"
-        + "                         Get principal\n"
-        + "list_principals, listprincs\n"
-        + "                         List principals\n"
-        + "ktadd, xst\n"
-        + "                         Add entry(s) to a keytab\n"
-        + "get_hostroles, hostroles\n"
-        + "                         Get hostRoles\n"
-        + "export_keytabs, expkeytabs\n"
-        + "                         Export keytabs\n"
-        + "create_principals, creprincs\n"
-        + "                         Create principals\n"
-        + "enable_configure, enable\n"
-        + "                         Enable configure\n"
-        + "disable_configure, disable\n"
-        + "                         Disable configure\n";
-
-    private static void execute(LocalHasAdmin hadmin, String input) throws HasException {
-        // Omit the leading and trailing whitespace.
-        input = input.trim();
-        if (input.startsWith("cmd")) {
-            System.out.println(LEGAL_COMMANDS);
-            return;
-        }
-
-        String[] items = input.split("\\s+");
-        String cmd = items[0];
-        HadminCmd executor;
-        if (cmd.startsWith("add_principal")
-            || cmd.startsWith("addprinc")) {
-            executor = new AddPrincipalCmd(hadmin);
-        } else if (cmd.startsWith("delete_principal")
-            || cmd.startsWith("delprinc")) {
-            executor = new DeletePrincipalCmd(hadmin);
-        } else if (cmd.startsWith("rename_principal")
-            || cmd.startsWith("renprinc")) {
-            executor = new RenamePrincipalCmd(hadmin);
-        } else if (cmd.startsWith("list_principals")
-            || cmd.startsWith("listprincs")) {
-            executor = new ListPrincipalsCmd(hadmin);
-        } else if (cmd.startsWith("ktadd")
-            || cmd.startsWith("xst")) {
-            executor = new KeytabAddCmd(hadmin);
-        } else if (cmd.startsWith("get_hostroles")
-            || cmd.startsWith("hostroles")) {
-            executor = new GetHostRolesCmd(hadmin);
-        } else if (cmd.startsWith("create_principals")
-            || cmd.startsWith("creprincs")) {
-            executor = new AddPrincipalsCmd(hadmin);
-        } else if (cmd.startsWith("export_keytabs")
-            || cmd.startsWith("expkeytabs")) {
-            executor = new ExportKeytabsCmd(hadmin);
-        } else if (cmd.startsWith("enable_configure")
-            || cmd.startsWith("enable")) {
-            executor = new EnableConfigureCmd(hadmin);
-        } else if (cmd.startsWith("disable_configure")
-            || cmd.startsWith("disable")) {
-            executor = new DisableConfigureCmd(hadmin);
-        }  else if (cmd.startsWith("get_principal")
-            || cmd.startsWith("getprinc")) {
-            executor = new GetPrincipalCmd(hadmin);
-        } else {
-            System.out.println(LEGAL_COMMANDS);
-            return;
-        }
-        executor.execute(items);
-    }
-
-    private static File getConfDir(String[] args) {
-        String envDir;
-        confDir = new File(args[0]);
-        if (confDir == null || !confDir.exists()) {
-            try {
-                Map<String, String> mapEnv = System.getenv();
-                envDir = mapEnv.get("KRB5_KDC_DIR");
-            } catch (SecurityException e) {
-                envDir = null;
-            }
-            if (envDir != null) {
-                confDir = new File(envDir);
-            } else {
-                confDir = new File("/etc/kerby/"); // for Linux. TODO: fix for Win etc.
-            }
-
-            if (!confDir.exists()) {
-                throw new RuntimeException("Can not locate KDC backend directory "
-                        + confDir.getAbsolutePath());
-            }
-        }
-        LOG.info("Conf dir:" + confDir.getAbsolutePath());
-        return confDir;
-    }
-
-    public static void main(String[] args) {
-
-        if (args.length < 2) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        LocalHasAdmin hadmin;
-        try {
-            hadmin = new LocalHasAdmin(getConfDir(args));
-        } catch (KrbException e) {
-            System.err.println("Failed to init HasAdmin due to " + e.getMessage());
-            return;
-        }
-
-        KOptions kOptions = ToolUtil.parseOptions(args, 1, args.length - 1);
-        if (kOptions == null) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        String hadminPrincipal = hadmin.getHadminPrincipal();
-        Subject subject = null;
-        if (kOptions.contains(KadminOption.CCACHE)) {
-            File ccFile = kOptions.getFileOption(KadminOption.CCACHE);
-            if (ccFile == null || !ccFile.exists()) {
-                printUsage("Need the valid credentials cache file.");
-                return;
-            }
-            try {
-                subject = AuthUtil.loginUsingTicketCache(hadminPrincipal, ccFile);
-            } catch (LoginException e) {
-                System.err.println("Could not login with: " + hadminPrincipal
-                    + e.getMessage());
-                return;
-            }
-        } else if (kOptions.contains(KadminOption.K)) {
-            File keyTabFile = new File(kOptions.getStringOption(KadminOption.K));
-            if (keyTabFile == null || !keyTabFile.exists()) {
-                printUsage("Need the valid keytab file.");
-                return;
-            }
-            try {
-                subject = AuthUtil.loginUsingKeytab(hadminPrincipal, keyTabFile);
-            } catch (LoginException e) {
-                System.err.println("Could not login with: " + hadminPrincipal
-                    + e.getMessage());
-                return;
-            }
-        } else {
-            printUsage("No credentials cache file or keytab file for authentication.");
-        }
-        if (subject != null) {
-            Principal adminPrincipal = new KerberosPrincipal(hadminPrincipal);
-            Set<Principal> princSet = subject.getPrincipals();
-            if (princSet == null || princSet.isEmpty()) {
-                printUsage("The principals in subject is empty.");
-                return;
-            }
-            if (princSet.contains(adminPrincipal)) {
-                System.out.println("Login successful for user: " + hadminPrincipal);
-            } else {
-                printUsage("Login failure for " + hadminPrincipal);
-                return;
-            }
-        } else {
-            printUsage("The subject is null, login failure for " + hadminPrincipal);
-            return;
-        }
-        System.out.println("enter \"cmd\" to see legal commands.");
-        System.out.print(PROMPT + ": ");
-
-        try (Scanner scanner = new Scanner(System.in, "UTF-8")) {
-            String input = scanner.nextLine();
-
-            while (!(input.equals("quit") || input.equals("exit")
-                    || input.equals("q"))) {
-                try {
-                    execute(hadmin, input);
-                } catch (HasException e) {
-                    System.err.println(e.getMessage());
-                }
-                System.out.print(PROMPT + ": ");
-                input = scanner.nextLine();
-            }
-        }
-    }
-}


[15/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
Change the Maven groupId in HAS folder to org.apache.kerby.


Project: http://git-wip-us.apache.org/repos/asf/directory-kerby/repo
Commit: http://git-wip-us.apache.org/repos/asf/directory-kerby/commit/a8b1c28f
Tree: http://git-wip-us.apache.org/repos/asf/directory-kerby/tree/a8b1c28f
Diff: http://git-wip-us.apache.org/repos/asf/directory-kerby/diff/a8b1c28f

Branch: refs/heads/has-project
Commit: a8b1c28fa6588b00c4fe8f93808d1a070d822682
Parents: 438904f
Author: plusplusjiajia <ji...@intel.com>
Authored: Tue Nov 28 10:59:38 2017 +0800
Committer: plusplusjiajia <ji...@intel.com>
Committed: Tue Nov 28 10:59:38 2017 +0800

----------------------------------------------------------------------
 has/has-client/pom.xml                          |   4 +-
 .../has/client/AbstractHasClientPlugin.java     |  44 --
 .../hadoop/has/client/HasAdminClient.java       | 480 -------------
 .../hadoop/has/client/HasAuthAdminClient.java   | 553 ---------------
 .../org/apache/hadoop/has/client/HasClient.java | 677 ------------------
 .../hadoop/has/client/HasClientPlugin.java      |  42 --
 .../has/client/HasClientPluginRegistry.java     |  63 --
 .../hadoop/has/client/HasLoginException.java    |  37 -
 .../hadoop/has/client/HasLoginModule.java       | 491 -------------
 .../has/client/AbstractHasClientPlugin.java     |  44 ++
 .../apache/kerby/has/client/HasAdminClient.java | 480 +++++++++++++
 .../kerby/has/client/HasAuthAdminClient.java    | 553 +++++++++++++++
 .../org/apache/kerby/has/client/HasClient.java  | 677 ++++++++++++++++++
 .../kerby/has/client/HasClientPlugin.java       |  42 ++
 .../has/client/HasClientPluginRegistry.java     |  63 ++
 .../kerby/has/client/HasLoginException.java     |  37 +
 .../apache/kerby/has/client/HasLoginModule.java | 491 +++++++++++++
 has/has-common/pom.xml                          |   4 +-
 .../org/apache/hadoop/has/common/HasAdmin.java  | 140 ----
 .../org/apache/hadoop/has/common/HasConfig.java | 103 ---
 .../apache/hadoop/has/common/HasConfigKey.java  |  61 --
 .../apache/hadoop/has/common/HasException.java  |  53 --
 .../hadoop/has/common/spnego/AuthToken.java     | 217 ------
 .../has/common/spnego/AuthenticatedURL.java     | 282 --------
 .../common/spnego/AuthenticationException.java  |  54 --
 .../hadoop/has/common/spnego/Authenticator.java |  52 --
 .../common/spnego/KerberosAuthenticator.java    | 359 ----------
 .../common/spnego/KerberosHasAuthenticator.java |  25 -
 .../hadoop/has/common/spnego/KerberosUtil.java  | 262 -------
 .../hadoop/has/common/ssl/KeyStoresFactory.java | 254 -------
 .../common/ssl/ReloadingX509TrustManager.java   | 208 ------
 .../hadoop/has/common/ssl/SSLFactory.java       | 290 --------
 .../has/common/ssl/SSLHostnameVerifier.java     | 615 ----------------
 .../has/common/util/ConnectionConfigurator.java |  39 --
 .../has/common/util/HasJaasLoginUtil.java       | 261 -------
 .../apache/hadoop/has/common/util/HasUtil.java  |  93 ---
 .../hadoop/has/common/util/PlatformName.java    |  59 --
 .../hadoop/has/common/util/StringUtils.java     |  55 --
 .../has/common/util/URLConnectionFactory.java   | 215 ------
 .../org/apache/kerby/has/common/HasAdmin.java   | 140 ++++
 .../org/apache/kerby/has/common/HasConfig.java  | 103 +++
 .../apache/kerby/has/common/HasConfigKey.java   |  61 ++
 .../apache/kerby/has/common/HasException.java   |  53 ++
 .../kerby/has/common/spnego/AuthToken.java      | 217 ++++++
 .../has/common/spnego/AuthenticatedURL.java     | 282 ++++++++
 .../common/spnego/AuthenticationException.java  |  54 ++
 .../kerby/has/common/spnego/Authenticator.java  |  52 ++
 .../common/spnego/KerberosAuthenticator.java    | 359 ++++++++++
 .../common/spnego/KerberosHasAuthenticator.java |  25 +
 .../kerby/has/common/spnego/KerberosUtil.java   | 262 +++++++
 .../kerby/has/common/ssl/KeyStoresFactory.java  | 254 +++++++
 .../common/ssl/ReloadingX509TrustManager.java   | 208 ++++++
 .../apache/kerby/has/common/ssl/SSLFactory.java | 290 ++++++++
 .../has/common/ssl/SSLHostnameVerifier.java     | 615 ++++++++++++++++
 .../has/common/util/ConnectionConfigurator.java |  39 ++
 .../kerby/has/common/util/HasJaasLoginUtil.java | 261 +++++++
 .../apache/kerby/has/common/util/HasUtil.java   |  93 +++
 .../kerby/has/common/util/PlatformName.java     |  59 ++
 .../kerby/has/common/util/StringUtils.java      |  55 ++
 .../has/common/util/URLConnectionFactory.java   | 215 ++++++
 has/has-dist/bin/hadmin-local.sh                |   2 +-
 has/has-dist/bin/hadmin-remote.sh               |   2 +-
 has/has-dist/bin/kdcinit.sh                     |   2 +-
 has/has-dist/bin/kinit.sh                       |   2 +-
 has/has-dist/bin/klist.sh                       |   2 +-
 has/has-dist/bin/login-test.sh                  |   2 +-
 has/has-dist/bin/start-has.sh                   |   2 +-
 has/has-dist/pom.xml                            |  14 +-
 has/has-plugins/pom.xml                         |   8 +-
 has/has-server/pom.xml                          |   6 +-
 .../has/server/AbstractHasServerPlugin.java     |  45 --
 .../hadoop/has/server/HasAuthenException.java   |  37 -
 .../org/apache/hadoop/has/server/HasServer.java | 701 -------------------
 .../hadoop/has/server/HasServerPlugin.java      |  39 --
 .../has/server/HasServerPluginRegistry.java     |  63 --
 .../hadoop/has/server/admin/LocalHasAdmin.java  | 382 ----------
 .../hadoop/has/server/kdc/HasKdcHandler.java    | 315 ---------
 .../hadoop/has/server/kdc/MySQLConfKey.java     |  52 --
 .../has/server/kdc/MySQLIdentityBackend.java    | 426 -----------
 .../hadoop/has/server/web/ConfFilter.java       |  54 --
 .../hadoop/has/server/web/HostRoleType.java     |  55 --
 .../hadoop/has/server/web/WebConfigKey.java     |  62 --
 .../apache/hadoop/has/server/web/WebServer.java | 348 ---------
 .../hadoop/has/server/web/rest/ConfApi.java     | 196 ------
 .../hadoop/has/server/web/rest/HadminApi.java   | 455 ------------
 .../hadoop/has/server/web/rest/HasApi.java      | 336 ---------
 .../server/web/rest/param/AuthTokenParam.java   |  45 --
 .../has/server/web/rest/param/EnumParam.java    |  51 --
 .../has/server/web/rest/param/HostParam.java    |  45 --
 .../server/web/rest/param/HostRoleParam.java    |  45 --
 .../hadoop/has/server/web/rest/param/Param.java | 123 ----
 .../server/web/rest/param/PasswordParam.java    |  45 --
 .../server/web/rest/param/PrincipalParam.java   |  45 --
 .../has/server/web/rest/param/StringParam.java  |  68 --
 .../has/server/web/rest/param/TypeParam.java    |  48 --
 .../has/server/AbstractHasServerPlugin.java     |  45 ++
 .../kerby/has/server/HasAuthenException.java    |  37 +
 .../org/apache/kerby/has/server/HasServer.java  | 701 +++++++++++++++++++
 .../kerby/has/server/HasServerPlugin.java       |  39 ++
 .../has/server/HasServerPluginRegistry.java     |  63 ++
 .../kerby/has/server/admin/LocalHasAdmin.java   | 382 ++++++++++
 .../kerby/has/server/kdc/HasKdcHandler.java     | 315 +++++++++
 .../kerby/has/server/kdc/MySQLConfKey.java      |  52 ++
 .../has/server/kdc/MySQLIdentityBackend.java    | 426 +++++++++++
 .../apache/kerby/has/server/web/ConfFilter.java |  54 ++
 .../kerby/has/server/web/HostRoleType.java      |  55 ++
 .../kerby/has/server/web/WebConfigKey.java      |  62 ++
 .../apache/kerby/has/server/web/WebServer.java  | 348 +++++++++
 .../kerby/has/server/web/rest/ConfApi.java      | 196 ++++++
 .../kerby/has/server/web/rest/HadminApi.java    | 455 ++++++++++++
 .../kerby/has/server/web/rest/HasApi.java       | 336 +++++++++
 .../server/web/rest/param/AuthTokenParam.java   |  45 ++
 .../has/server/web/rest/param/EnumParam.java    |  51 ++
 .../has/server/web/rest/param/HostParam.java    |  45 ++
 .../server/web/rest/param/HostRoleParam.java    |  45 ++
 .../kerby/has/server/web/rest/param/Param.java  | 123 ++++
 .../server/web/rest/param/PasswordParam.java    |  45 ++
 .../server/web/rest/param/PrincipalParam.java   |  45 ++
 .../has/server/web/rest/param/StringParam.java  |  68 ++
 .../has/server/web/rest/param/TypeParam.java    |  48 ++
 .../hadoop/has/server/TestHasWebServer.java     | 128 ----
 .../hadoop/has/server/TestRestApiBase.java      | 336 ---------
 .../org/apache/hadoop/has/server/TestUtil.java  | 368 ----------
 .../hadoop/has/server/json/TestJsonConfApi.java |  83 ---
 .../has/server/json/TestJsonHadminApi.java      |  80 ---
 .../hadoop/has/server/json/TestJsonHasApi.java  |  54 --
 .../has/server/mysql/TestMySQLConfApi.java      |  70 --
 .../has/server/mysql/TestMySQLHadminApi.java    |  64 --
 .../has/server/mysql/TestMySQLHasApi.java       |  46 --
 .../kerby/has/server/TestHasWebServer.java      | 128 ++++
 .../kerby/has/server/TestRestApiBase.java       | 336 +++++++++
 .../org/apache/kerby/has/server/TestUtil.java   | 368 ++++++++++
 .../kerby/has/server/json/TestJsonConfApi.java  |  83 +++
 .../has/server/json/TestJsonHadminApi.java      |  80 +++
 .../kerby/has/server/json/TestJsonHasApi.java   |  54 ++
 .../has/server/mysql/TestMySQLConfApi.java      |  70 ++
 .../has/server/mysql/TestMySQLHadminApi.java    |  64 ++
 .../kerby/has/server/mysql/TestMySQLHasApi.java |  46 ++
 has/has-tool/has-client-tool/pom.xml            |   6 +-
 .../client/hadmin/remote/HadminRemoteTool.java  | 164 -----
 .../hadmin/remote/cmd/HadminRemoteCmd.java      |  49 --
 .../remote/cmd/HasRemoteAddPrincipalCmd.java    |  70 --
 .../cmd/HasRemoteCreatePrincipalsCmd.java       |  82 ---
 .../remote/cmd/HasRemoteDeletePrincipalCmd.java |  89 ---
 .../remote/cmd/HasRemoteDisableConfCmd.java     |  49 --
 .../remote/cmd/HasRemoteEnableConfCmd.java      |  49 --
 .../remote/cmd/HasRemoteExportKeytabsCmd.java   |  58 --
 .../remote/cmd/HasRemoteGetHostRolesCmd.java    |  68 --
 .../remote/cmd/HasRemoteGetPrincipalsCmd.java   |  76 --
 .../remote/cmd/HasRemoteRenamePrincipalCmd.java |  91 ---
 .../tool/client/hclient/HasClientLoginTool.java | 269 -------
 .../has/tool/client/kdcinit/HasInitTool.java    | 132 ----
 .../kdcinit/cmd/HasConfKdcBackendCmd.java       |  66 --
 .../tool/client/kdcinit/cmd/HasConfKdcCmd.java  |  54 --
 .../client/kdcinit/cmd/HasGetHasconfCmd.java    |  77 --
 .../client/kdcinit/cmd/HasGetKrb5confCmd.java   |  77 --
 .../tool/client/kdcinit/cmd/HasInitKdcCmd.java  |  94 ---
 .../client/kdcinit/cmd/HasSetPluginCmd.java     |  53 --
 .../tool/client/kdcinit/cmd/HasStartKdcCmd.java |  49 --
 .../has/tool/client/kdcinit/cmd/KdcInitCmd.java |  42 --
 .../has/tool/client/kinit/KinitOption.java      |  88 ---
 .../hadoop/has/tool/client/kinit/KinitTool.java | 384 ----------
 .../has/tool/client/klist/KlistOption.java      |  66 --
 .../hadoop/has/tool/client/klist/KlistTool.java | 293 --------
 .../client/hadmin/remote/HadminRemoteTool.java  | 164 +++++
 .../hadmin/remote/cmd/HadminRemoteCmd.java      |  49 ++
 .../remote/cmd/HasRemoteAddPrincipalCmd.java    |  70 ++
 .../cmd/HasRemoteCreatePrincipalsCmd.java       |  82 +++
 .../remote/cmd/HasRemoteDeletePrincipalCmd.java |  89 +++
 .../remote/cmd/HasRemoteDisableConfCmd.java     |  49 ++
 .../remote/cmd/HasRemoteEnableConfCmd.java      |  49 ++
 .../remote/cmd/HasRemoteExportKeytabsCmd.java   |  58 ++
 .../remote/cmd/HasRemoteGetHostRolesCmd.java    |  68 ++
 .../remote/cmd/HasRemoteGetPrincipalsCmd.java   |  76 ++
 .../remote/cmd/HasRemoteRenamePrincipalCmd.java |  91 +++
 .../tool/client/hclient/HasClientLoginTool.java | 269 +++++++
 .../has/tool/client/kdcinit/HasInitTool.java    | 132 ++++
 .../kdcinit/cmd/HasConfKdcBackendCmd.java       |  66 ++
 .../tool/client/kdcinit/cmd/HasConfKdcCmd.java  |  54 ++
 .../client/kdcinit/cmd/HasGetHasconfCmd.java    |  77 ++
 .../client/kdcinit/cmd/HasGetKrb5confCmd.java   |  77 ++
 .../tool/client/kdcinit/cmd/HasInitKdcCmd.java  |  94 +++
 .../client/kdcinit/cmd/HasSetPluginCmd.java     |  53 ++
 .../tool/client/kdcinit/cmd/HasStartKdcCmd.java |  49 ++
 .../has/tool/client/kdcinit/cmd/KdcInitCmd.java |  42 ++
 .../has/tool/client/kinit/KinitOption.java      |  88 +++
 .../kerby/has/tool/client/kinit/KinitTool.java  | 384 ++++++++++
 .../has/tool/client/klist/KlistOption.java      |  66 ++
 .../kerby/has/tool/client/klist/KlistTool.java  | 293 ++++++++
 has/has-tool/has-server-tool/pom.xml            |   6 +-
 .../server/hadmin/local/HadminLocalTool.java    | 265 -------
 .../hadmin/local/cmd/AddPrincipalCmd.java       |  61 --
 .../hadmin/local/cmd/AddPrincipalsCmd.java      |  78 ---
 .../hadmin/local/cmd/DeletePrincipalCmd.java    |  80 ---
 .../hadmin/local/cmd/DisableConfigureCmd.java   |  40 --
 .../hadmin/local/cmd/EnableConfigureCmd.java    |  40 --
 .../hadmin/local/cmd/ExportKeytabsCmd.java      |  57 --
 .../hadmin/local/cmd/GetHostRolesCmd.java       |  36 -
 .../hadmin/local/cmd/GetPrincipalCmd.java       |  76 --
 .../tool/server/hadmin/local/cmd/HadminCmd.java |  42 --
 .../server/hadmin/local/cmd/KeytabAddCmd.java   |  91 ---
 .../hadmin/local/cmd/ListPrincipalsCmd.java     |  63 --
 .../hadmin/local/cmd/RenamePrincipalCmd.java    |  82 ---
 .../server/hadmin/local/HadminLocalTool.java    | 265 +++++++
 .../hadmin/local/cmd/AddPrincipalCmd.java       |  61 ++
 .../hadmin/local/cmd/AddPrincipalsCmd.java      |  78 +++
 .../hadmin/local/cmd/DeletePrincipalCmd.java    |  80 +++
 .../hadmin/local/cmd/DisableConfigureCmd.java   |  40 ++
 .../hadmin/local/cmd/EnableConfigureCmd.java    |  40 ++
 .../hadmin/local/cmd/ExportKeytabsCmd.java      |  57 ++
 .../hadmin/local/cmd/GetHostRolesCmd.java       |  36 +
 .../hadmin/local/cmd/GetPrincipalCmd.java       |  76 ++
 .../tool/server/hadmin/local/cmd/HadminCmd.java |  42 ++
 .../server/hadmin/local/cmd/KeytabAddCmd.java   |  91 +++
 .../hadmin/local/cmd/ListPrincipalsCmd.java     |  63 ++
 .../hadmin/local/cmd/RenamePrincipalCmd.java    |  82 +++
 has/has-tool/pom.xml                            |   2 +-
 has/pom.xml                                     |   3 +-
 has/supports/hadoop/hadoop-2.7.2.patch          |   4 +-
 .../hbase/hbase-1.1.10-hadoop-2.5.1.patch       |   2 +-
 has/supports/zookeeper/conf/jaas.conf           |   2 +-
 has/supports/zookeeper/pom.xml                  |   4 +-
 222 files changed, 15034 insertions(+), 15033 deletions(-)
----------------------------------------------------------------------
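
As the file list above shows, this change is a pure relocation: every class moves from the org.apache.hadoop.has packages to org.apache.kerby.has, and the HAS modules switch their Maven groupId from org.apache.hadoop to org.apache.kerby (see the pom.xml hunks below), with no API changes. A minimal sketch of what this means for downstream code; the class used here is chosen only for illustration:

// Illustrative only: after this commit, consumers switch imports from
// org.apache.hadoop.has.* to org.apache.kerby.has.*; class names and
// methods are unchanged, so no other source changes should be needed.
import org.apache.kerby.has.common.HasConfig;  // was org.apache.hadoop.has.common.HasConfig

public class GroupIdMigrationSketch {
    public static void main(String[] args) {
        HasConfig conf = new HasConfig();      // same API, new package and Maven groupId
        System.out.println(conf.getClass().getName());
    }
}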


http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-client/pom.xml b/has/has-client/pom.xml
index c5cabe9..e641882 100644
--- a/has/has-client/pom.xml
+++ b/has/has-client/pom.xml
@@ -5,7 +5,7 @@
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
-    <groupId>org.apache.hadoop</groupId>
+    <groupId>org.apache.kerby</groupId>
     <artifactId>has-project</artifactId>
     <version>1.0.0-SNAPSHOT</version>
   </parent>
@@ -53,7 +53,7 @@
       <version>${slf4j.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.kerby</groupId>
       <artifactId>has-common</artifactId>
       <version>${project.version}</version>
     </dependency>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/hadoop/has/client/AbstractHasClientPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/AbstractHasClientPlugin.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/AbstractHasClientPlugin.java
deleted file mode 100644
index 9ff9749..0000000
--- a/has/has-client/src/main/java/org/apache/hadoop/has/client/AbstractHasClientPlugin.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.client;
-
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.kerby.kerberos.kerb.KrbRuntime;
-import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public abstract class AbstractHasClientPlugin implements HasClientPlugin {
-    public static final Logger LOG = LoggerFactory.getLogger(AbstractHasClientPlugin.class);
-
-    protected abstract void doLogin(AuthToken token) throws HasLoginException;
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public AuthToken login(HasConfig conf) throws HasLoginException {
-
-        AuthToken authToken = KrbRuntime.getTokenProvider("JWT").createTokenFactory().createToken();
-
-        doLogin(authToken);
-
-        return authToken;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAdminClient.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAdminClient.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAdminClient.java
deleted file mode 100644
index 3f5e3fa..0000000
--- a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAdminClient.java
+++ /dev/null
@@ -1,480 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.client;
-
-import com.sun.jersey.api.client.Client;
-import com.sun.jersey.api.client.ClientResponse;
-import com.sun.jersey.api.client.WebResource;
-import com.sun.jersey.api.client.config.ClientConfig;
-import com.sun.jersey.api.client.config.DefaultClientConfig;
-import com.sun.jersey.client.urlconnection.HTTPSProperties;
-import com.sun.jersey.core.util.MultivaluedMapImpl;
-import org.apache.hadoop.has.common.HasAdmin;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.ssl.SSLFactory;
-import org.apache.hadoop.has.common.util.URLConnectionFactory;
-import org.apache.kerby.kerberos.kerb.common.KrbUtil;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.glassfish.jersey.SslConfigurator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.net.ssl.HostnameVerifier;
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.SSLSession;
-import javax.ws.rs.core.MultivaluedMap;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * An admin client API for applications to interact with the KDC
- */
-public class HasAdminClient implements HasAdmin {
-
-    public static final Logger LOG = LoggerFactory.getLogger(HasAdminClient.class);
-
-    private HasConfig hasConfig;
-    private File confDir;
-
-    public HasAdminClient(HasConfig hasConfig) {
-        this.hasConfig = hasConfig;
-    }
-    public HasAdminClient(HasConfig hasConfig, File confDir) {
-        this.hasConfig = hasConfig;
-        this.confDir = confDir;
-    }
-
-    public File getConfDir() {
-        return confDir;
-    }
-
-    public HasConfig getHasConfig() {
-        return hasConfig;
-    }
-
-    protected HttpURLConnection getHttpsConnection(URL url, boolean isSpnego) throws Exception {
-        HasConfig conf = new HasConfig();
-
-        conf.setString(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
-        String sslClientConf = hasConfig.getSslClientConf();
-        conf.setString(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConf);
-        conf.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, false);
-
-        URLConnectionFactory connectionFactory = URLConnectionFactory
-                .newDefaultURLConnectionFactory(conf);
-        return (HttpURLConnection) connectionFactory.openConnection(url, isSpnego, hasConfig);
-    }
-
-    private WebResource getWebResource(String restName) {
-        Client client;
-        String server = null;
-        if ((hasConfig.getHttpsPort() != null) && (hasConfig.getHttpsHost() != null)) {
-            server = "https://" + hasConfig.getHttpsHost() + ":" + hasConfig.getHttpsPort()
-                    + "/has/v1/" + restName;
-            LOG.info("Admin request url: " + server);
-            HasConfig conf = new HasConfig();
-            try {
-                conf.addIniConfig(new File(hasConfig.getSslClientConf()));
-            } catch (IOException e) {
-                throw new RuntimeException("Errors occurred when adding ssl conf. "
-                    + e.getMessage());
-            }
-            SslConfigurator sslConfigurator = SslConfigurator.newInstance()
-                    .trustStoreFile(conf.getString("ssl.client.truststore.location"))
-                    .trustStorePassword(conf.getString("ssl.client.truststore.password"));
-            sslConfigurator.securityProtocol("SSL");
-            SSLContext sslContext = sslConfigurator.createSSLContext();
-            ClientConfig clientConfig = new DefaultClientConfig();
-            clientConfig.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES,
-                    new HTTPSProperties(new HostnameVerifier() {
-                        @Override
-                        public boolean verify(String s, SSLSession sslSession) {
-                            return false;
-                        }
-                    }, sslContext));
-            client = Client.create(clientConfig);
-        } else {
-            client = Client.create();
-        }
-        if (server == null) {
-            throw new RuntimeException("Please set the https address and port.");
-        }
-        return client.resource(server);
-    }
-
-    /**
-     * Change principals JSON string to a List.
-     *
-     * @param princs principals JSON string that looks like
-     *               "["HTTP\/host1@HADOOP.COM","HTTP\/host2@HADOOP.COM"]"
-     * @return principalLists.
-     */
-    private List<String> getPrincsList(String princs) {
-        List<String> principalLists = new ArrayList<>();
-        try {
-            JSONArray principals = new JSONArray(princs);
-            for (int i = 0; i < principals.length(); i++) {
-                principalLists.add("\t" + principals.getString(i));
-            }
-        } catch (Exception e) {
-            System.err.println("Errors occurred when getting the principals."
-                + e.getMessage());
-        }
-        return principalLists;
-    }
-
-    public void requestCreatePrincipals(String hostRoles) throws HasException {
-        WebResource webResource = getWebResource("admin/createprincipals");
-        String response = webResource.entity(hostRoles.toString().getBytes()).put(String.class);
-        try {
-            System.out.println(new JSONObject(response).getString("msg"));
-        } catch (JSONException e) {
-            throw new HasException(e);
-        }
-    }
-
-    @Override
-    public void addPrincipal(String principal) throws HasException {
-        WebResource webResource = getWebResource("admin/addprincipal");
-
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("principal", principal);
-        String response = webResource.queryParams(params).post(String.class);
-        try {
-            System.out.println(new JSONObject(response).getString("msg"));
-        } catch (JSONException e) {
-            System.err.println("Errors occurred when getting the message from response."
-                + e.getMessage());
-        }
-    }
-
-    @Override
-    public File getKeytabByHostAndRole(String host, String role) throws HasException {
-        WebResource webResource = getWebResource("admin/exportkeytabs");
-
-        String keytabName = host + ".zip";
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("host", host);
-        if (!role.equals("")) {
-            params.add("role", role);
-            keytabName = role + "-" + host + ".keytab";
-        }
-        ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
-        if (response.getStatus() != 200) {
-            System.err.println("Error : connection denied.");
-            return null;
-        }
-        FileOutputStream fos = null;
-        try {
-            fos = new FileOutputStream(new File(keytabName));
-        } catch (FileNotFoundException e) {
-            System.err.println(e.getMessage());
-        }
-        InputStream in = response.getEntityInputStream();
-        byte[] buffer = new byte[4 * 1024];
-        int read;
-        try {
-            while ((read = in.read(buffer)) > 0) {
-                fos.write(buffer, 0, read);
-            }
-            fos.close();
-            in.close();
-        } catch (IOException e) {
-            System.err.println("Errors occurred when reading the buffer to write keytab file."
-                + e.getMessage());
-        }
-        System.out.println("Accept keytab file \"" + keytabName + "\" from server.");
-        return new File(keytabName);
-    }
-
-    @Override
-    public void addPrincipal(String principal, String password) throws HasException {
-        WebResource webResource = getWebResource("admin/addprincipal");
-
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("principal", principal);
-        params.add("password", password);
-        String response = webResource.queryParams(params).post(String.class);
-        try {
-            System.out.println(new JSONObject(response).getString("msg"));
-        } catch (JSONException e) {
-            System.err.println("Errors occurred when getting the message from response."
-                + e.getMessage());
-        }
-    }
-
-    @Override
-    public void deletePrincipal(String principal) throws HasException {
-        WebResource webResource = getWebResource("admin/deleteprincipal");
-
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("principal", principal);
-        String response = webResource.queryParams(params).delete(String.class);
-        try {
-            System.out.println(new JSONObject(response).getString("msg"));
-        } catch (JSONException e) {
-            System.err.println("Errors occurred when getting the message from response."
-                + e.getMessage());
-        }
-    }
-
-    @Override
-    public void renamePrincipal(String oldPrincipal, String newPrincipal) throws HasException {
-        WebResource webResource = getWebResource("admin/renameprincipal");
-
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("oldprincipal", oldPrincipal);
-        params.add("newprincipal", newPrincipal);
-        String response = webResource.queryParams(params).post(String.class);
-        try {
-            System.out.println(new JSONObject(response).getString("msg"));
-        } catch (JSONException e) {
-            System.err.println(e.getMessage());
-        }
-    }
-
-    @Override
-    public List<String> getPrincipals() throws HasException {
-        WebResource webResource = getWebResource("admin/getprincipals");
-
-        String response = webResource.get(String.class);
-        String princs = null;
-        try {
-            princs = new JSONObject(response).getString("msg");
-        } catch (JSONException e) {
-            System.err.println("Errors occurred when getting the message from response."
-                + e.getMessage());
-        }
-        return getPrincsList(princs);
-    }
-
-    @Override
-    public List<String> getPrincipals(String exp) throws HasException {
-        WebResource webResource = getWebResource("admin/getprincipals");
-
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("exp", exp);
-        String response = webResource.queryParams(params).get(String.class);
-        return getPrincsList(response);
-    }
-
-    /**
-     * Create http connection to has server.
-     *
-     * @param url
-     * @param method
-     * @return connection
-     * @throws IOException
-     */
-    protected HttpURLConnection createConnection(URL url, String method) throws IOException {
-        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-        conn.setRequestMethod(method);
-        if (method.equals("POST") || method.equals("PUT")) {
-            conn.setDoOutput(true);
-        }
-        return conn;
-    }
-
-    @Override
-    public String addPrincByRole(String host, String role) throws HasException {
-        //TODO
-        return null;
-    }
-
-    @Override
-    public String getHadminPrincipal() {
-        return KrbUtil.makeKadminPrincipal(hasConfig.getRealm()).getName();
-    }
-
-    /**
-     * get size of principal
-     */
-    @Override
-    public int size() throws HasException {
-        return this.getPrincipals().size();
-    }
-
-    public String getKrb5conf() {
-        WebResource webResource = getWebResource("getkrb5conf");
-        ClientResponse response = webResource.get(ClientResponse.class);
-        if (response.getStatus() == 200) {
-            return response.getEntity(String.class);
-        }
-        return null;
-    }
-
-    public String getHasconf() {
-        WebResource webResource = getWebResource("gethasconf");
-        ClientResponse response = webResource.get(ClientResponse.class);
-        if (response.getStatus() == 200) {
-            return response.getEntity(String.class);
-        }
-        return null;
-    }
-    public void setPlugin(String plugin) {
-        WebResource webResource = getWebResource("conf/setplugin");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("plugin", plugin);
-        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
-        if (response.getStatus() == 200) {
-            System.out.println(response.getEntity(String.class));
-        } else if (response.getStatus() == 400) {
-            System.err.println(response.getEntity(String.class));
-        }
-    }
-    public void configKdc(String port, String realm, String host) {
-        WebResource webResource = getWebResource("conf/configkdc");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("port", port);
-        params.add("realm", realm);
-        params.add("host", host);
-        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
-        if (response.getStatus() == 200) {
-            System.out.println(response.getEntity(String.class));
-        } else if (response.getStatus() == 400) {
-            System.err.println(response.getEntity(String.class));
-        }
-    }
-    public void configKdcBackend(String backendType, String dir, String url, String user,
-                                 String password) {
-        WebResource webResource = getWebResource("conf/configkdcbackend");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("backendType", backendType);
-        if (backendType.equals("json")) {
-            params.add("dir", dir);
-        } else if (backendType.equals("mysql")) {
-            params.add("url", url);
-            params.add("user", user);
-            params.add("password", password);
-        }
-        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
-        if (response.getStatus() == 200) {
-            System.out.println(response.getEntity(String.class));
-        } else if (response.getStatus() == 400) {
-            System.err.println(response.getEntity(String.class));
-        }
-    }
-    public void startKdc() {
-        WebResource webResource = getWebResource("kdcstart");
-        ClientResponse response = webResource.get(ClientResponse.class);
-        try {
-            JSONObject result = new JSONObject(response.getEntity(String.class));
-            if (result.getString("result").equals("success")) {
-                System.out.println(result.getString("msg"));
-            } else {
-                System.err.println(result.getString("msg"));
-            }
-        } catch (JSONException e) {
-            System.err.println(e.getMessage());
-        }
-    }
-    public InputStream initKdc() {
-        WebResource webResource = getWebResource("kdcinit");
-        ClientResponse response = webResource.get(ClientResponse.class);
-        if (response.getStatus() == 200) {
-            return response.getEntityInputStream();
-        }
-        return null;
-    }
-    public String getHostRoles() {
-        WebResource webResource = getWebResource("hostroles");
-        ClientResponse response = webResource.get(ClientResponse.class);
-        if (response.getStatus() == 200) {
-            return response.getEntity(String.class);
-        }
-        return null;
-    }
-    public void setEnableOfConf(String isEnable) throws HasException {
-        WebResource webResource = getWebResource("admin/setconf");
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("isEnable", isEnable);
-        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
-        if (response.getStatus() == 200) {
-            System.out.println(response.getEntity(String.class));
-        } else {
-            System.err.println(response.getEntity(String.class));
-        }
-    }
-
-    @Override
-    public void exportKeytab(File keytab, String principal) throws HasException {
-        WebResource webResource = getWebResource("admin/exportkeytab");
-
-        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-        params.add("principal", principal);
-        ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
-        FileOutputStream fos;
-        try {
-            fos = new FileOutputStream(keytab);
-        } catch (FileNotFoundException e) {
-            throw new HasException("The keytab file: " + keytab + "not exist. " + e);
-        }
-        InputStream in = response.getEntityInputStream();
-        byte[] buffer = new byte[4 * 1024];
-        int read;
-        try {
-            while ((read = in.read(buffer)) > 0) {
-                fos.write(buffer, 0, read);
-            }
-            fos.close();
-            in.close();
-        } catch (IOException e) {
-            System.err.println("Errors occurred when writing the buffer to keytab file." + e.toString());
-        }
-        System.out.println("Accept keytab file \"" + keytab.getName() + "\" from server successfully.");
-    }
-
-    @Override
-    public void exportKeytab(File keytabFile, List<String> principals) throws HasException {
-        WebResource webResource = getWebResource("admin/exportkeytab");
-        for (String principal: principals) {
-            MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-            params.add("principal", principal);
-            ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
-            FileOutputStream fos;
-            try {
-                fos = new FileOutputStream(keytabFile);
-            } catch (FileNotFoundException e) {
-                throw new HasException("The keytab file: " + keytabFile.getName() + "not exist. " + e);
-            }
-            InputStream in = response.getEntityInputStream();
-            byte[] buffer = new byte[4 * 1024];
-            int read;
-            try {
-                while ((read = in.read(buffer)) > 0) {
-                    fos.write(buffer, 0, read);
-                }
-                fos.close();
-                in.close();
-            } catch (IOException e) {
-                LOG.error("Errors occurred when writing the buffer to keytab file." + e.toString());
-            }
-        }
-        System.out.println("Accept keytab file \"" + keytabFile.getName() + "\" from server successfully.");
-    }
-}
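
The HasAdminClient removed above (and re-added under org.apache.kerby.has.client by this commit) is a thin REST client over the server's /has/v1/ admin endpoints. A minimal usage sketch, assuming the relocated package names and a client-side configuration file; the file name has-client.conf and the principal/password values are illustrative only:

import org.apache.kerby.has.client.HasAdminClient;
import org.apache.kerby.has.common.HasConfig;

import java.io.File;
import java.util.List;

public class HasAdminClientSketch {
    public static void main(String[] args) throws Exception {
        // The config must supply the server's https host/port and the
        // ssl client conf; "has-client.conf" is an assumed file name.
        HasConfig conf = new HasConfig();
        conf.addIniConfig(new File("has-client.conf"));

        HasAdminClient admin = new HasAdminClient(conf);
        admin.addPrincipal("alice", "mypassword");        // POST admin/addprincipal
        List<String> principals = admin.getPrincipals();  // GET admin/getprincipals
        for (String p : principals) {
            System.out.println(p);
        }
    }
}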

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAuthAdminClient.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAuthAdminClient.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAuthAdminClient.java
deleted file mode 100644
index d7e3f5a..0000000
--- a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAuthAdminClient.java
+++ /dev/null
@@ -1,553 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.client;
-
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.net.HttpURLConnection;
-import java.net.MalformedURLException;
-import java.net.ProtocolException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-
-public class HasAuthAdminClient extends HasAdminClient {
-    public static final Logger LOG = LoggerFactory.getLogger(HasAuthAdminClient.class);
-
-    /**
-     * Create an instance of the HasAuthAdminClient.
-     *
-     * @param hasConfig the has config
-     */
-    public HasAuthAdminClient(HasConfig hasConfig) {
-        super(hasConfig);
-    }
-
-    /**
-     * Create an authenticated connection to the Has server.
-     * <p>
-     * It uses Hadoop-auth client authentication which by default supports
-     * Kerberos HTTP SPNEGO, Pseudo/Simple and anonymous.
-     *
-     * @param url    the URL to open a HTTP connection to.
-     * @param method the HTTP method for the HTTP connection.
-     * @return an authenticated connection to the has server.
-     * @throws IOException if an IO error occurred.
-     */
-    @Override
-    protected HttpURLConnection createConnection(URL url, String method) {
-        HttpURLConnection conn = null;
-        if ((getHasConfig().getHttpsPort() != null) && (getHasConfig().getHttpsHost() != null)) {
-            try {
-                conn = super.getHttpsConnection(url, true);
-            } catch (Exception e) {
-                System.err.println(e.getMessage());
-            }
-        }
-        if (method.equals("POST") || method.equals("PUT")) {
-            conn.setDoOutput(true);
-        }
-        return conn;
-    }
-
-    private String getBaseURL() {
-        String url = null;
-        if ((getHasConfig().getHttpsPort() != null) && (getHasConfig().getHttpsHost() != null)) {
-            url = "https://" + getHasConfig().getHttpsHost() + ":" + getHasConfig().getHttpsPort()
-                + "/has/v1/admin/";
-        }
-        if (url == null) {
-            throw new RuntimeException("Please set the https address and port.");
-        }
-        return url;
-    }
-
-    public void addPrincipal(String principal) throws HasException {
-        HttpURLConnection httpConn;
-
-        URL url;
-        try {
-            url = new URL(getBaseURL() + "addprincipal?principal=" + principal);
-        } catch (MalformedURLException e) {
-            throw new HasException(e);
-        }
-
-        httpConn = createConnection(url, "POST");
-
-        httpConn.setRequestProperty("Content-Type",
-            "application/json; charset=UTF-8");
-        try {
-            httpConn.setRequestMethod("POST");
-        } catch (ProtocolException e) {
-            LOG.error("Fail to add principal. " + e);
-            throw new HasException(e);
-        }
-        try {
-            httpConn.setDoOutput(true);
-            httpConn.setDoInput(true);
-            httpConn.connect();
-
-            if (httpConn.getResponseCode() == 200) {
-                System.out.println(getResponse(httpConn));
-            } else {
-                throw new HasException("Fail to add principal \"" + principal + "\".");
-            }
-        } catch (Exception e) {
-            LOG.error("Fail to add principal. " + e);
-            throw new HasException(e);
-        }
-    }
-
-    public void setEnableOfConf(String isEnable) throws HasException {
-        HttpURLConnection httpConn;
-
-        URL url;
-        try {
-            url = new URL(getBaseURL() + "setconf?isEnable=" + isEnable);
-        } catch (MalformedURLException e) {
-            throw new HasException(e);
-        }
-
-        httpConn = createConnection(url, "PUT");
-
-        httpConn.setRequestProperty("Content-Type",
-                "application/json; charset=UTF-8");
-        try {
-            httpConn.setRequestMethod("PUT");
-        } catch (ProtocolException e) {
-            throw new HasException(e);
-        }
-        try {
-            httpConn.setDoOutput(true);
-            httpConn.setDoInput(true);
-            httpConn.connect();
-            InputStream inputStream = httpConn.getResponseCode() == 200
-                    ? httpConn.getInputStream() : httpConn.getErrorStream();
-            BufferedReader reader = new BufferedReader(
-                    new InputStreamReader(inputStream));
-            String s;
-            StringBuilder result = new StringBuilder();
-            while ((s = reader.readLine()) != null) {
-                result.append(s);
-            }
-            if (httpConn.getResponseCode() == 200) {
-                System.out.println(result);
-            } else {
-                System.err.println(result);
-            }
-        } catch (Exception e) {
-            LOG.error("Fail to connect to server. " + e);
-            throw new HasException(e);
-        }
-    }
-
-    /**
-     * Change principals JSON string to a List.
-     *
-     * @param princs principals JSON string that looks like
-     *               "["HTTP\/host1@HADOOP.COM","HTTP\/host2@HADOOP.COM"]"
-     * @return principalLists.
-     */
-    private List<String> getPrincsList(String princs) {
-        List<String> principalLists = new ArrayList<>();
-        try {
-            JSONArray principals = new JSONArray(princs);
-            for (int i = 0; i < principals.length(); i++) {
-                principalLists.add("\t" + principals.getString(i));
-            }
-        } catch (Exception e) {
-            System.err.println(e.getMessage());
-        }
-        return principalLists;
-    }
-
-    @Override
-    public void requestCreatePrincipals(String hostRoles) throws HasException {
-        HttpURLConnection httpConn;
-
-        URL url;
-        try {
-            url = new URL(getBaseURL() + "createprincipals");
-        } catch (MalformedURLException e) {
-            throw new HasException(e);
-        }
-
-        httpConn = createConnection(url, "POST");
-
-        httpConn.setRequestProperty("Content-Type",
-                "application/json; charset=UTF-8");
-        try {
-            httpConn.setRequestMethod("PUT");
-        } catch (ProtocolException e) {
-            throw new HasException(e);
-        }
-        httpConn.setDoOutput(true);
-        httpConn.setDoInput(true);
-        try {
-            httpConn.connect();
-            OutputStream out = httpConn.getOutputStream();
-            out.write(hostRoles.toString().getBytes());
-            out.flush();
-            out.close();
-            if (httpConn.getResponseCode() == 200) {
-                System.out.println(getResponse(httpConn));
-            } else {
-                throw new HasException("Connection deined.");
-            }
-        } catch (Exception e) {
-            throw new HasException(e);
-        }
-    }
-
-    @Override
-    public File getKeytabByHostAndRole(String host, String role) throws HasException {
-        String keytabName = host + ".zip";
-        HttpURLConnection httpConn;
-        String request = getBaseURL() + "exportkeytabs?host=" + host;
-        if (!role.equals("")) {
-            request = request + "&role=" + role;
-            keytabName = role + "-" + host + ".keytab";
-        }
-
-        URL url;
-        try {
-            url = new URL(request);
-        } catch (MalformedURLException e) {
-            throw new HasException(e);
-        }
-
-        httpConn = createConnection(url, "GET");
-
-        httpConn.setRequestProperty("Content-Type",
-            "application/json; charset=UTF-8");
-        try {
-            httpConn.setRequestMethod("GET");
-        } catch (ProtocolException e) {
-            throw new HasException(e);
-        }
-        httpConn.setDoOutput(true);
-        httpConn.setDoInput(true);
-        try {
-            httpConn.connect();
-
-            if (httpConn.getResponseCode() != 200) {
-                System.err.println("Error : connection denied.");
-                return null;
-            }
-            FileOutputStream fos = new FileOutputStream(new File(keytabName));
-            InputStream in = httpConn.getInputStream();
-            byte[] buffer = new byte[4 * 1024];
-            int read;
-            while ((read = in.read(buffer)) > 0) {
-                fos.write(buffer, 0, read);
-            }
-            fos.close();
-            in.close();
-        } catch (IOException e) {
-            throw new HasException(e);
-        }
-        System.out.println("Accept keytab file \"" + keytabName + "\" from server.");
-
-        return new File(keytabName);
-    }
-
-    @Override
-    public void exportKeytab(File keytab, String principal) throws HasException {
-        URL url = null;
-        try {
-            url = new URL(getBaseURL() + "exportkeytab?principal=" + principal);
-        } catch (MalformedURLException e) {
-            LOG.error("Fail to get url. " + e);
-            throw new HasException("Fail to get url.", e);
-        }
-
-        HttpURLConnection httpConn = createConnection(url, "GET");
-        httpConn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
-        try {
-            httpConn.setRequestMethod("GET");
-        } catch (ProtocolException e) {
-            throw new HasException(e);
-        }
-        httpConn.setDoOutput(true);
-        httpConn.setDoInput(true);
-        try {
-            httpConn.connect();
-            if (httpConn.getResponseCode() != 200) {
-                System.err.println("Error: connection denied.");
-            }
-            FileOutputStream fos = new FileOutputStream(keytab);
-            InputStream in = httpConn.getInputStream();
-            byte[] buffer = new byte[3 * 1024];
-            int read;
-            while ((read = in.read(buffer)) > 0) {
-                fos.write(buffer, 0, read);
-            }
-            fos.close();
-            in.close();
-        } catch (IOException e) {
-            throw new HasException(e);
-        }
-        System.out.println("Receive keytab file \"" + keytab.getName() + "\" from server successfully.");
-    }
-
-    @Override
-    public void exportKeytab(File keytabFile, List<String> principals) throws HasException {
-        HttpURLConnection httpConn;
-        for (String principal: principals) {
-            String request = getBaseURL() + "exportkeytab?principal=" + principal;
-            URL url;
-            try {
-                url = new URL(request);
-            } catch (MalformedURLException e) {
-                throw new HasException(e);
-            }
-            httpConn = createConnection(url, "GET");
-            httpConn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
-            try {
-                httpConn.setRequestMethod("GET");
-            } catch (ProtocolException e) {
-                throw new HasException(e);
-            }
-            httpConn.setDoOutput(true);
-            httpConn.setDoInput(true);
-            try {
-                httpConn.connect();
-                if (httpConn.getResponseCode() != 200) {
-                    System.err.println("Error: connection denied.");
-                }
-                FileOutputStream fos = new FileOutputStream(keytabFile);
-                InputStream in = httpConn.getInputStream();
-                byte[] buffer = new byte[4 * 1024];
-                int read;
-                while ((read = in.read(buffer)) > 0) {
-                    fos.write(buffer, 0, read);
-                }
-                fos.close();
-                in.close();
-            } catch (IOException e) {
-                throw new HasException(e);
-            }
-        }
-        System.out.println("Accept keytab file \"" + keytabFile.getName() + "\" from server.");
-    }
-
-    @Override
-    public void addPrincipal(String principal, String password) throws HasException {
-        HttpURLConnection httpConn;
-
-        URL url = null;
-        try {
-            url = new URL(getBaseURL() + "addprincipal?principal=" + principal
-                            + "&password=" + password);
-        } catch (MalformedURLException e) {
-            throw new HasException("Fail to get url.", e);
-        }
-
-        httpConn = createConnection(url, "POST");
-
-        httpConn.setRequestProperty("Content-Type",
-                "application/json; charset=UTF-8");
-        try {
-            httpConn.setRequestMethod("POST");
-        } catch (ProtocolException e) {
-            throw new HasException(e);
-        }
-        try {
-            httpConn.setDoOutput(true);
-            httpConn.setDoInput(true);
-            httpConn.connect();
-
-            if (httpConn.getResponseCode() == 200) {
-                System.out.println(getResponse(httpConn));
-            } else {
-                throw new HasException("Fail to add principal \"" + principal + "\".");
-            }
-        } catch (Exception e) {
-            throw new HasException(e);
-        }
-    }
-
-    @Override
-    public void deletePrincipal(String principal) throws HasException {
-        HttpURLConnection httpConn;
-
-        URL url;
-        try {
-            url = new URL(getBaseURL() + "deleteprincipal?principal=" + principal);
-        } catch (MalformedURLException e) {
-            throw new HasException(e);
-        }
-
-        httpConn = createConnection(url, "DELETE");
-
-        httpConn.setRequestProperty("Content-Type",
-                "application/json; charset=UTF-8");
-        try {
-            httpConn.setRequestMethod("DELETE");
-        } catch (ProtocolException e) {
-            throw new HasException(e);
-        }
-        try {
-            httpConn.setDoOutput(true);
-            httpConn.setDoInput(true);
-            httpConn.connect();
-
-            if (httpConn.getResponseCode() == 200) {
-                System.out.println(getResponse(httpConn));
-            } else {
-                throw new HasException("Connection deined.");
-            }
-        } catch (Exception e) {
-            throw new HasException(e);
-        }
-    }
-
-    @Override
-    public void renamePrincipal(String oldPrincipal, String newPrincipal) throws HasException {
-        HttpURLConnection httpConn;
-
-        URL url;
-        try {
-            url = new URL(getBaseURL() + "renameprincipal?oldprincipal=" + oldPrincipal
-                            + "&newprincipal=" + newPrincipal);
-        } catch (MalformedURLException e) {
-            throw new HasException(e);
-        }
-
-        httpConn = createConnection(url, "POST");
-
-        httpConn.setRequestProperty("Content-Type",
-                "application/json; charset=UTF-8");
-        try {
-            httpConn.setRequestMethod("POST");
-        } catch (ProtocolException e) {
-            throw new HasException(e);
-        }
-        try {
-            httpConn.setDoOutput(true);
-            httpConn.setDoInput(true);
-            httpConn.connect();
-
-            if (httpConn.getResponseCode() == 200) {
-                System.out.println(getResponse(httpConn));
-            } else {
-                throw new HasException("Connection to renameprincipal deined.");
-            }
-        } catch (Exception e) {
-            throw new HasException(e);
-        }
-    }
-
-    @Override
-    public List<String> getPrincipals() throws HasException {
-        HttpURLConnection httpConn;
-
-        URL url;
-        try {
-            url = new URL(getBaseURL() + "getprincipals");
-        } catch (MalformedURLException e) {
-            System.err.println(e.getMessage());
-            throw new HasException(e);
-        }
-
-        httpConn = createConnection(url, "GET");
-
-        httpConn.setRequestProperty("Content-Type",
-                "application/json; charset=UTF-8");
-        try {
-            httpConn.setRequestMethod("GET");
-        } catch (ProtocolException e) {
-            throw new HasException(e);
-        }
-        String response;
-        try {
-            httpConn.setDoInput(true);
-            httpConn.connect();
-
-            if (httpConn.getResponseCode() == 200) {
-                response = getResponse(httpConn);
-            } else {
-                throw new HasException("Connection to getprincipals deined.");
-            }
-        } catch (Exception e) {
-            LOG.error("Fail to get principals." + e);
-            throw new HasException("Fail to get principals.", e);
-        }
-        return getPrincsList(response);
-    }
-
-    @Override
-    public List<String> getPrincipals(String exp) throws HasException {
-        HttpURLConnection httpConn;
-
-        URL url;
-        try {
-            url = new URL(getBaseURL() + "getprincipals?exp=" + exp);
-        } catch (MalformedURLException e) {
-            throw new HasException(e);
-        }
-
-        httpConn = createConnection(url, "GET");
-
-        httpConn.setRequestProperty("Content-Type",
-                "application/json; charset=UTF-8");
-        try {
-            httpConn.setRequestMethod("GET");
-        } catch (ProtocolException e) {
-            LOG.error("Fail to get the principals with expression. " + e);
-            throw new HasException("Fail to get the principals with expression.", e);
-        }
-        String response;
-        try {
-            httpConn.setDoOutput(true);
-            httpConn.setDoInput(true);
-            httpConn.connect();
-
-            if (httpConn.getResponseCode() == 200) {
-                response = getResponse(httpConn);
-            } else {
-                throw new HasException("Connection to getprincipals deined.");
-            }
-        } catch (Exception e) {
-            throw new HasException(e);
-        }
-        return getPrincsList(response);
-    }
-
-    private String getResponse(HttpURLConnection httpConn) throws Exception {
-        StringBuilder data = new StringBuilder();
-        BufferedReader br = new BufferedReader(new InputStreamReader(httpConn.getInputStream()));
-        String s;
-        while ((s = br.readLine()) != null) {
-            data.append(s);
-        }
-        return new JSONObject(data.toString()).getString("msg");
-    }
-}
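
HasAuthAdminClient, removed above and re-added under org.apache.kerby.has.client, extends HasAdminClient and issues the same admin requests over an authenticated HTTPS connection (Kerberos HTTP SPNEGO by default, per the javadoc above). A sketch of swapping it in, reusing the illustrative has-client.conf from the previous example and assuming a usable Kerberos login is available to the SPNEGO layer:

import org.apache.kerby.has.client.HasAuthAdminClient;
import org.apache.kerby.has.common.HasAdmin;
import org.apache.kerby.has.common.HasConfig;

import java.io.File;

public class HasAuthAdminClientSketch {
    public static void main(String[] args) throws Exception {
        HasConfig conf = new HasConfig();
        conf.addIniConfig(new File("has-client.conf")); // assumed file name, as above

        // Same admin operations as HasAdminClient, but each request goes
        // through a SPNEGO-authenticated HTTPS connection.
        HasAdmin admin = new HasAuthAdminClient(conf);
        admin.renamePrincipal("alice", "alice2");       // POST admin/renameprincipal
    }
}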

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClient.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClient.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClient.java
deleted file mode 100755
index 5f612d3..0000000
--- a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClient.java
+++ /dev/null
@@ -1,677 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.client;
-
-import com.sun.jersey.api.client.Client;
-import com.sun.jersey.api.client.ClientHandlerException;
-import com.sun.jersey.api.client.ClientResponse;
-import com.sun.jersey.api.client.WebResource;
-import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.text.CharacterPredicates;
-import org.apache.commons.text.RandomStringGenerator;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasConfigKey;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.ssl.SSLFactory;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.common.util.URLConnectionFactory;
-import org.apache.kerby.kerberos.kerb.KrbCodec;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.KrbRuntime;
-import org.apache.kerby.kerberos.kerb.crypto.EncryptionHandler;
-import org.apache.kerby.kerberos.kerb.provider.TokenEncoder;
-import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptedData;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
-import org.apache.kerby.kerberos.kerb.type.base.KeyUsage;
-import org.apache.kerby.kerberos.kerb.type.base.KrbError;
-import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;
-import org.apache.kerby.kerberos.kerb.type.base.KrbMessageType;
-import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
-import org.apache.kerby.kerberos.kerb.type.kdc.EncAsRepPart;
-import org.apache.kerby.kerberos.kerb.type.kdc.EncKdcRepPart;
-import org.apache.kerby.kerberos.kerb.type.kdc.KdcRep;
-import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;
-import org.apache.kerby.util.IOUtil;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.PrintStream;
-import java.net.HttpURLConnection;
-import java.net.MalformedURLException;
-import java.net.ProtocolException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.nio.ByteBuffer;
-import java.security.GeneralSecurityException;
-import java.security.KeyStore;
-import java.security.PublicKey;
-import java.security.cert.CertificateException;
-import java.security.cert.CertificateFactory;
-import java.security.cert.X509Certificate;
-import java.util.Date;
-
-/**
- * HAS client
- */
-public class HasClient {
-
-    public static final Logger LOG = LoggerFactory.getLogger(HasClient.class);
-
-    public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf";
-    public static final String HAS_HTTP_PORT_DEFAULT = "9870";
-    public static final String HAS_CONFIG_DEFAULT = "/etc/has/has-client.conf";
-    public static final String CA_ROOT_DEFAULT = "/etc/has/ca-root.pem";
-
-    private String hadoopSecurityHas = null;
-    private String type;
-    private File clientConfigFolder;
-
-
-    public HasClient() { }
-
-    /**
-     * Create an instance of the HasClient.
-     *
-     * @param hadoopSecurityHas the has config
-     */
-    public HasClient(String hadoopSecurityHas) {
-        this.hadoopSecurityHas = hadoopSecurityHas;
-    }
-
-
-    public TgtTicket requestTgt() throws HasException {
-        HasConfig config;
-        if (hadoopSecurityHas == null) {
-            String hasClientConf = System.getenv("HAS_CLIENT_CONF");
-            if (hasClientConf == null) {
-                hasClientConf = HAS_CONFIG_DEFAULT;
-            }
-            LOG.debug("has-client conf path: " + hasClientConf);
-            File confFile = new File(hasClientConf);
-            if (!confFile.exists()) {
-                throw new HasException("The HAS client config file: " + hasClientConf
-                    + " does not exist.");
-            }
-            try {
-                config = HasUtil.getHasConfig(confFile);
-            } catch (HasException e) {
-                LOG.error("Failed to get has client config: " + e.getMessage());
-                throw new HasException("Failed to get has client config: " + e);
-            }
-        } else {
-            config = new HasConfig();
-            String[] urls = hadoopSecurityHas.split(";");
-            String host = "";
-            int port = 0;
-            try {
-                for (String url : urls) {
-                    URI uri = new URI(url.trim());
-
-                    // parse host
-                    host = host + uri.getHost() + ",";
-
-                    // parse port
-                    if (port == 0) {
-                        port = uri.getPort();
-                    } else {
-                        if (port != uri.getPort()) {
-                            throw new HasException("Invalid port: not even.");
-                        }
-                    }
-
-                    // We will get the auth type from env first
-                    type = System.getenv("auth_type");
-                    // parse the auth type from the query string
-                    if (type == null) {
-                        String[] strs = uri.getQuery().split("=");
-                        if (strs[0].equals("auth_type")) {
-                            type = strs[1];
-                        } else {
-                            LOG.warn("No auth type in conf.");
-                        }
-                    }
-                }
-                if (host == null || port == 0) {
-                    throw new HasException("host is null.");
-                } else {
-                    host = host.substring(0, host.length() - 1);
-                    config.setString(HasConfigKey.HTTPS_HOST, host);
-                    config.setInt(HasConfigKey.HTTPS_PORT, port);
-                    config.setString(HasConfigKey.AUTH_TYPE, type);
-                }
-            } catch (URISyntaxException e) {
-                LOG.error("Errors occurred when getting web url. " + e.getMessage());
-                throw new HasException(
-                    "Errors occurred when getting web url. " + e.getMessage());
-            }
-        }
-        if (config == null) {
-            throw new HasException("Failed to get HAS client config.");
-        }
-        clientConfigFolder = new File("/etc/has/" + config.getHttpsHost());
-        if (!clientConfigFolder.exists()) {
-            clientConfigFolder.mkdirs();
-        }
-
-        // get and set ssl-client/trustStore first
-        String sslClientConfPath = clientConfigFolder + "/ssl-client.conf";
-        loadSslClientConf(config, sslClientConfPath);
-        config.setString(HasConfigKey.SSL_CLIENT_CONF, sslClientConfPath);
-
-        createKrb5Conf(config);
-
-        HasClientPlugin plugin;
-        try {
-            plugin = getClientTokenPlugin(config);
-        } catch (HasException e) {
-            LOG.error("Failed to get client token plugin from config: " + e.getMessage());
-            throw new HasException(
-                "Failed to get client token plugin from config: " + e.getMessage());
-        }
-        AuthToken authToken;
-        try {
-            authToken = plugin.login(config);
-        } catch (HasLoginException e) {
-            LOG.error("Plugin login failed: " + e.getMessage());
-            throw new HasException(
-                "Plugin login failed: " + e.getMessage());
-        }
-        type = plugin.getLoginType();
-
-        LOG.info("The plugin type is: " + type);
-
-        return requestTgt(authToken, type, config);
-    }
-
-    private void createKrb5Conf(HasConfig config) throws HasException {
-        HasAdminClient hasAdminClient = new HasAdminClient(config);
-        File krb5Conf = new File(clientConfigFolder + "/krb5.conf");
-        if (!krb5Conf.exists()) {
-            String content = hasAdminClient.getKrb5conf();
-            if (content == null) {
-                LOG.error("Failed to get krb5.conf.");
-                throw new HasException("Failed to get krb5.conf.");
-            }
-            try {
-                PrintStream ps = new PrintStream(new FileOutputStream(krb5Conf));
-                ps.println(content);
-                LOG.info("krb5.conf has saved in : " + krb5Conf.getAbsolutePath());
-            } catch (FileNotFoundException e) {
-                LOG.error(e.getMessage());
-                throw new HasException(e);
-            }
-        }
-        System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5Conf.getAbsolutePath());
-    }
-
-
-    private HasClientPlugin getClientTokenPlugin(HasConfig config) throws HasException {
-        String pluginName = config.getPluginName();
-        LOG.info("The plugin name getting from config is: " + pluginName);
-        HasClientPlugin clientPlugin;
-        if (pluginName != null) {
-            clientPlugin = HasClientPluginRegistry.createPlugin(pluginName);
-        } else {
-            throw new HasException("Please set the plugin name in has client conf");
-        }
-        if (clientPlugin == null) {
-            throw new HasException("Failed to create client plugin: " + pluginName);
-        }
-        LOG.info("The plugin class is: " + clientPlugin);
-
-        return clientPlugin;
-    }
-
-    /**
-     * Request a TGT with user token, plugin type and has config.
-     * @param authToken
-     * @param type
-     * @param config
-     * @return TGT
-     * @throws HasException e
-     */
-    public TgtTicket requestTgt(AuthToken authToken, String type, HasConfig config)
-        throws HasException {
-        TokenEncoder tokenEncoder = KrbRuntime.getTokenProvider("JWT").createTokenEncoder();
-
-        String tokenString;
-        try {
-            tokenString = tokenEncoder.encodeAsString(authToken);
-        } catch (KrbException e) {
-            LOG.debug("Failed to decode the auth token.");
-            throw new HasException("Failed to decode the auth token." + e.getMessage());
-        }
-
-        JSONObject json = null;
-        int responseStatus = 0;
-        boolean success = false;
-        if ((config.getHttpsPort() != null) && (config.getHttpsHost() != null)) {
-            String sslClientConfPath = clientConfigFolder + "/ssl-client.conf";
-            config.setString(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
-            config.setString(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConfPath);
-            config.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, false);
-
-            URLConnectionFactory connectionFactory = URLConnectionFactory
-                .newDefaultURLConnectionFactory(config);
-
-            URL url;
-            String[] hosts = config.getHttpsHost().split(",");
-            for (String host : hosts) {
-                try {
-                    url = new URL("https://" + host.trim() + ":" + config.getHttpsPort()
-                        + "/has/v1?type=" + type + "&authToken=" + tokenString);
-                } catch (MalformedURLException e) {
-                    LOG.warn("Failed to get url. " + e.toString());
-                    continue;
-                }
-                HttpURLConnection conn;
-                try {
-                    conn = (HttpURLConnection) connectionFactory.openConnection(url);
-                } catch (IOException e) {
-                    LOG.warn("Failed to open connection. " + e.toString());
-                    continue;
-                }
-
-                conn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
-                try {
-                    conn.setRequestMethod("PUT");
-                } catch (ProtocolException e) {
-                    LOG.warn("Failed to set request method. " + e.toString());
-                    continue;
-                }
-                conn.setDoOutput(true);
-                conn.setDoInput(true);
-
-                try {
-                    conn.connect();
-
-                    responseStatus = conn.getResponseCode();
-                    switch (responseStatus) {
-                        case 200:
-                        case 201:
-                            BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream()));
-                            StringBuilder sb = new StringBuilder();
-                            String line;
-                            while ((line = br.readLine()) != null) {
-                                sb.append(line + "\n");
-                            }
-                            br.close();
-
-                            json = new JSONObject(sb.toString());
-                    }
-
-                } catch (IOException | JSONException e) {
-                    LOG.warn("ERROR! " + e.toString());
-                    continue;
-                }
-
-                if (responseStatus == 200 || responseStatus == 201) {
-                    success = true;
-                    break;
-                }
-            }
-            if (!success) {
-                throw new HasException("Failed : HTTP error code : "
-                    + responseStatus);
-            }
-        } else {
-            WebResource webResource;
-            Client client = Client.create();
-            String[] hosts = config.getHttpHost().split(",");
-            for (String host : hosts) {
-                webResource = client
-                    .resource("http://" + host.trim() + ":" + config.getHttpPort()
-                        + "/has/v1?type=" + type + "&authToken="
-                        + tokenString);
-                try {
-                    ClientResponse response = webResource.accept("application/json")
-                        .put(ClientResponse.class);
-
-                    if (response.getStatus() != 200) {
-                        LOG.warn("WARN! " + response.getEntity(String.class));
-                        responseStatus = response.getStatus();
-                        continue;
-                    }
-                    json = response.getEntity(JSONObject.class);
-                } catch (ClientHandlerException e) {
-                    LOG.warn("WARN! " + e.toString());
-                    continue;
-                }
-                success = true;
-                break;
-            }
-            if (!success) {
-                throw new HasException("Failed : HTTP error code : "
-                    + responseStatus);
-            }
-        }
-
-        LOG.debug("Return from Server .... \n");
-
-        try {
-            return handleResponse(json, (String) authToken.getAttributes().get("passPhrase"));
-        } catch (HasException e) {
-            LOG.debug("Failed to handle response when requesting tgt ticket in client."
-                + e.getMessage());
-            throw new HasException(e);
-        }
-    }
-
-    private File loadSslClientConf(HasConfig config, String sslClientConfPath) throws HasException {
-        File sslClientConf = new File(sslClientConfPath);
-        if (!sslClientConf.exists()) {
-            String httpHost = config.getHttpHost();
-            String httpPort = config.getHttpPort();
-            if (httpHost == null) {
-                LOG.info("Can't find the http host in config, the https host will be used.");
-                httpHost = config.getHttpsHost();
-            }
-            if (httpPort == null) {
-                LOG.info("Can't find the http port in config, the default http port will be used.");
-                httpPort = HAS_HTTP_PORT_DEFAULT;
-            }
-            X509Certificate certificate = getCertificate(httpHost, httpPort);
-            if (verifyCertificate(certificate)) {
-                String password = createTrustStore(config.getHttpsHost(), certificate);
-                createClientSSLConfig(password);
-            } else {
-                throw new HasException("The certificate from HAS server is invalid.");
-            }
-        }
-        return sslClientConf;
-    }
-
-    public KrbMessage getKrbMessage(JSONObject json) throws HasException {
-
-        LOG.debug("Starting to get the message from has server.");
-
-        try {
-            boolean success = json.getBoolean("success");
-            if (!success) {
-                throw new HasException("Failed: " + json.getString("krbMessage"));
-            }
-        } catch (JSONException e) {
-            LOG.debug("Failed to get message." + e);
-            throw new HasException("Failed to get message." + e);
-        }
-
-        String typeString;
-        try {
-            typeString = json.getString("type");
-        } catch (JSONException e) {
-            LOG.debug("Failed to get message." + e);
-            throw new HasException("Failed to get message." + e);
-        }
-
-        if (typeString != null && typeString.equals(type)) {
-            LOG.debug("The message type is " + type);
-            String krbMessageString = null;
-            try {
-                krbMessageString = json.getString("krbMessage");
-            } catch (JSONException e) {
-                LOG.debug("Failed to get the krbMessage. " + e);
-            }
-            Base64 base64 = new Base64(0);
-            byte[] krbMessage = base64.decode(krbMessageString);
-            ByteBuffer byteBuffer = ByteBuffer.wrap(krbMessage);
-            KrbMessage kdcRep;
-            try {
-                kdcRep = KrbCodec.decodeMessage(byteBuffer);
-            } catch (IOException e) {
-                throw new HasException("Krb decoding message failed", e);
-            }
-            return kdcRep;
-        } else {
-            throw new HasException("Can't get the right message from server.");
-        }
-    }
-
-    public TgtTicket handleResponse(JSONObject json, String passPhrase)
-        throws HasException {
-        KrbMessage kdcRep = getKrbMessage(json);
-
-        KrbMessageType messageType = kdcRep.getMsgType();
-        if (messageType == KrbMessageType.AS_REP) {
-            return processResponse((KdcRep) kdcRep, passPhrase);
-        } else if (messageType == KrbMessageType.KRB_ERROR) {
-            KrbError error = (KrbError) kdcRep;
-            LOG.error("KDC server response with message: "
-                + error.getErrorCode().getMessage());
-
-            throw new HasException(error.getEtext());
-        }
-        return null;
-    }
-
-    public TgtTicket processResponse(KdcRep kdcRep, String passPhrase)
-        throws HasException {
-
-        PrincipalName clientPrincipal = kdcRep.getCname();
-        String clientRealm = kdcRep.getCrealm();
-        clientPrincipal.setRealm(clientRealm);
-
-        // Get the client to decrypt the EncryptedData
-        EncryptionKey clientKey = null;
-        try {
-            clientKey = HasUtil.getClientKey(clientPrincipal.getName(),
-                passPhrase,
-                kdcRep.getEncryptedEncPart().getEType());
-        } catch (KrbException e) {
-            throw new HasException("Could not generate key. " + e.getMessage());
-        }
-
-        byte[] decryptedData = decryptWithClientKey(kdcRep.getEncryptedEncPart(),
-            KeyUsage.AS_REP_ENCPART, clientKey);
-        if ((decryptedData[0] & 0x1f) == 26) {
-            decryptedData[0] = (byte) (decryptedData[0] - 1);
-        }
-        EncKdcRepPart encKdcRepPart = new EncAsRepPart();
-        try {
-            encKdcRepPart.decode(decryptedData);
-        } catch (IOException e) {
-            throw new HasException("Failed to decode EncAsRepPart", e);
-        }
-        kdcRep.setEncPart(encKdcRepPart);
-
-//        if (getChosenNonce() != encKdcRepPart.getNonce()) {
-//            throw new KrbException("Nonce didn't match");
-//        }
-
-//        PrincipalName returnedServerPrincipal = encKdcRepPart.getSname();
-//        returnedServerPrincipal.setRealm(encKdcRepPart.getSrealm());
-//        PrincipalName requestedServerPrincipal = getServerPrincipal();
-//        if (requestedServerPrincipal.getRealm() == null) {
-//            requestedServerPrincipal.setRealm(getContext().getKrbSetting().getKdcRealm());
-//        }
-//        if (!returnedServerPrincipal.equals(requestedServerPrincipal)) {
-//            throw new KrbException(KrbErrorCode.KDC_ERR_SERVER_NOMATCH);
-//        }
-
-//        HostAddresses hostAddresses = getHostAddresses();
-//        if (hostAddresses != null) {
-//            List<HostAddress> requestHosts = hostAddresses.getElements();
-//            if (!requestHosts.isEmpty()) {
-//                List<HostAddress> responseHosts = encKdcRepPart.getCaddr().getElements();
-//                for (HostAddress h : requestHosts) {
-//                    if (!responseHosts.contains(h)) {
-//                        throw new KrbException("Unexpected client host");
-//                    }
-//                }
-//            }
-//        }
-
-        TgtTicket tgtTicket = getTicket(kdcRep);
-        LOG.info("Ticket expire time: " + tgtTicket.getEncKdcRepPart().getEndTime());
-        return tgtTicket;
-
-    }
-
-    protected byte[] decryptWithClientKey(EncryptedData data,
-                                          KeyUsage usage,
-                                          EncryptionKey clientKey) throws HasException {
-        if (clientKey == null) {
-            throw new HasException("Client key isn't available");
-        }
-        try {
-            return EncryptionHandler.decrypt(data, clientKey, usage);
-        } catch (KrbException e) {
-            throw new HasException("Errors occurred when decrypting the data." + e.getMessage());
-        }
-    }
-
-    /**
-     * Get the tgt ticket from KdcRep
-     *
-     * @param kdcRep
-     */
-    public TgtTicket getTicket(KdcRep kdcRep) {
-        TgtTicket tgtTicket = new TgtTicket(kdcRep.getTicket(),
-            (EncAsRepPart) kdcRep.getEncPart(), kdcRep.getCname());
-        return tgtTicket;
-    }
-
-    /**
-     * Get certificate from HAS server.
-     *
-     */
-    private X509Certificate getCertificate(String host, String port) throws HasException {
-        X509Certificate certificate;
-        Client client = Client.create();
-        WebResource webResource = client.resource("http://" + host + ":" + port + "/has/v1/getcert");
-        ClientResponse response = webResource.get(ClientResponse.class);
-        if (response.getStatus() != 200) {
-            throw new HasException(response.getEntity(String.class));
-        }
-        try {
-            CertificateFactory factory = CertificateFactory.getInstance("X.509");
-            InputStream in = response.getEntityInputStream();
-            certificate = (X509Certificate) factory.generateCertificate(in);
-        } catch (CertificateException e) {
-            throw new HasException("Failed to get certificate from HAS server", e);
-        }
-
-        return certificate;
-    }
-
-    /**
-     * Verify certificate.
-     */
-    private boolean verifyCertificate(X509Certificate certificate) throws HasException {
-        // Check if certificate is expired
-        try {
-            Date date = new Date();
-            certificate.checkValidity(date);
-        } catch (GeneralSecurityException e) {
-            return false;
-        }
-
-        // Get certificate from ca root
-        X509Certificate caRoot;
-        try {
-            //Get the ca root path from env, client should export it.
-            String caRootPath = System.getenv("CA_ROOT");
-            if (caRootPath == null) {
-                caRootPath = CA_ROOT_DEFAULT;
-            }
-            File caRootFile;
-            if (caRootPath != null) {
-                caRootFile = new File(caRootPath);
-                if (!caRootFile.exists()) {
-                    throw new HasException("CA_ROOT: " + caRootPath + " not exist.");
-                }
-            } else {
-                throw new HasException("Please set the CA_ROOT.");
-            }
-
-            CertificateFactory factory = CertificateFactory.getInstance("X.509");
-            FileInputStream in = new FileInputStream(caRootFile);
-            caRoot = (X509Certificate) factory.generateCertificate(in);
-        } catch (CertificateException | FileNotFoundException e) {
-            throw new HasException("Failed to get certificate from ca root file", e);
-        }
-
-        // Verify certificate with root certificate
-        try {
-            PublicKey publicKey = caRoot.getPublicKey();
-            certificate.verify(publicKey);
-        } catch (GeneralSecurityException e) {
-            return false;
-        }
-
-        return true;
-    }
-
-    /**
-     * Create and save truststore file based on certificate.
-     *
-     */
-    private String createTrustStore(String host, X509Certificate certificate) throws HasException {
-        KeyStore trustStore;
-
-        // Create password
-        RandomStringGenerator generator = new RandomStringGenerator.Builder()
-            .withinRange('a', 'z')
-            .filteredBy(CharacterPredicates.LETTERS, CharacterPredicates.DIGITS)
-            .build();
-        String password = generator.generate(15);
-
-        File trustStoreFile = new File(clientConfigFolder + "/truststore.jks");
-        try {
-            trustStore = KeyStore.getInstance("jks");
-            trustStore.load(null, null);
-            trustStore.setCertificateEntry(host, certificate);
-            FileOutputStream out = new FileOutputStream(trustStoreFile);
-            trustStore.store(out, password.toCharArray());
-            out.close();
-        } catch (IOException | GeneralSecurityException e) {
-            throw new HasException("Failed to create and save truststore file", e);
-        }
-        return password;
-    }
-
-    /**
-     * Create ssl configuration file for client.
-     *
-     */
-    private void createClientSSLConfig(String password) throws HasException {
-        String resourcePath = "/ssl-client.conf.template";
-        InputStream templateResource = getClass().getResourceAsStream(resourcePath);
-        try {
-            String content = IOUtil.readInput(templateResource);
-            content = content.replaceAll("_location_", clientConfigFolder.getAbsolutePath()
-                + "/truststore.jks");
-            content = content.replaceAll("_password_", password);
-
-            IOUtil.writeFile(content, new File(clientConfigFolder + "/ssl-client.conf"));
-        } catch (IOException e) {
-            throw new HasException("Failed to create client ssl configuration file", e);
-        }
-    }
-}
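
For reference, the entry point into the class deleted above is requestTgt(): construct a HasClient either with no arguments (it then reads HAS_CLIENT_CONF or /etc/has/has-client.conf) or with the HAS server URL string, and receive a TgtTicket back. A hedged sketch; the URL and auth_type value below are placeholders:

import org.apache.hadoop.has.client.HasClient;
import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;

public final class HasClientExample {
    public static void main(String[] args) throws Exception {
        // Placeholder server URL; host, port and auth_type are parsed out of this string.
        String hadoopSecurityHas = "https://has.example.com:9870/has/v1?auth_type=example";
        HasClient client = new HasClient(hadoopSecurityHas);
        TgtTicket tgt = client.requestTgt();
        System.out.println("TGT expire time: " + tgt.getEncKdcRepPart().getEndTime());
    }
}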

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPlugin.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPlugin.java
deleted file mode 100644
index 4bd0749..0000000
--- a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPlugin.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.has.client;
-
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
-
-public interface HasClientPlugin {
-
-    /**
-     * Get the login module type ID, used to distinguish this module from others.
-     * Should correspond to the server side module.
-     *
-     * @return login type
-     */
-    String getLoginType();
-
-    /**
-     * Perform all the client-side login logic; the result, wrapped in an AuthToken,
-     * will be validated by the HAS server.
-     *
-     * @param conf token plugin config
-     * @return user auth token
-     */
-    AuthToken login(HasConfig conf) throws HasLoginException;
-}
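
The interface above defines the client half of a pluggable login mechanism: getLoginType() must match the server-side plugin, and login() returns an AuthToken for the server to validate. A rough, illustrative implementation under those assumptions (the plugin type, subject and attribute values are made up; the JWT token factory call mirrors the provider used by HasClient):

import org.apache.hadoop.has.client.HasClientPlugin;
import org.apache.hadoop.has.client.HasLoginException;
import org.apache.hadoop.has.common.HasConfig;
import org.apache.kerby.kerberos.kerb.KrbRuntime;
import org.apache.kerby.kerberos.kerb.type.base.AuthToken;

public class ExampleClientPlugin implements HasClientPlugin {

    @Override
    public String getLoginType() {
        // Must correspond to the server-side plugin's login type.
        return "EXAMPLE";
    }

    @Override
    public AuthToken login(HasConfig conf) throws HasLoginException {
        // Build a token carrying whatever the matching server-side plugin expects to validate.
        AuthToken token = KrbRuntime.getTokenProvider("JWT").createTokenFactory().createToken();
        token.setIssuer("example-client");
        token.setSubject("alice");
        // HasClient later reads "passPhrase" from the token attributes when decrypting the reply.
        token.addAttribute("passPhrase", "example-pass-phrase");
        return token;
    }
}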


[11/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasJaasLoginUtil.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasJaasLoginUtil.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasJaasLoginUtil.java
deleted file mode 100644
index e824ea4..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasJaasLoginUtil.java
+++ /dev/null
@@ -1,261 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.common.util;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.security.auth.Subject;
-import javax.security.auth.kerberos.KerberosPrincipal;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
-import java.io.File;
-import java.io.IOException;
-import java.security.Principal;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * JAAS utilities for Has login.
- */
-public class HasJaasLoginUtil {
-    public static final Logger LOG = LoggerFactory.getLogger(HasJaasLoginUtil.class);
-
-    public static final boolean ENABLE_DEBUG = true;
-
-    private static String getKrb5LoginModuleName() {
-        return System.getProperty("java.vendor").contains("IBM")
-            ? "com.ibm.security.auth.module.Krb5LoginModule"
-            : "org.apache.hadoop.has.client.HasLoginModule";
-    }
-
-    /**
-     * Log a user in from a tgt ticket.
-     *
-     * @throws IOException
-     */
-    public static synchronized Subject loginUserFromTgtTicket(String hadoopSecurityHas) throws IOException {
-
-        TICKET_KERBEROS_OPTIONS.put("hadoopSecurityHas", hadoopSecurityHas);
-        Subject subject = new Subject();
-        Configuration conf = new HasJaasConf();
-        String confName = "ticket-kerberos";
-        LoginContext loginContext = null;
-        try {
-            loginContext = new LoginContext(confName, subject, null, conf);
-        } catch (LoginException e) {
-            throw new IOException("Fail to create LoginContext for " + e);
-        }
-        try {
-            loginContext.login();
-            LOG.info("Login successful for user "
-                + subject.getPrincipals().iterator().next().getName());
-        } catch (LoginException e) {
-            throw new IOException("Login failure for " + e);
-        }
-        return loginContext.getSubject();
-    }
-
-    /**
-     * Has Jaas config.
-     */
-    static class HasJaasConf extends Configuration {
-        @Override
-        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-
-            return new AppConfigurationEntry[]{
-                TICKET_KERBEROS_LOGIN};
-        }
-    }
-
-    private static final Map<String, String> BASIC_JAAS_OPTIONS =
-        new HashMap<String, String>();
-
-    static {
-        String jaasEnvVar = System.getenv("HADOOP_JAAS_DEBUG");
-        if (jaasEnvVar != null && "true".equalsIgnoreCase(jaasEnvVar)) {
-            BASIC_JAAS_OPTIONS.put("debug", String.valueOf(ENABLE_DEBUG));
-        }
-    }
-
-    private static final Map<String, String> TICKET_KERBEROS_OPTIONS =
-        new HashMap<String, String>();
-
-    static {
-        TICKET_KERBEROS_OPTIONS.put("doNotPrompt", "true");
-        TICKET_KERBEROS_OPTIONS.put("useTgtTicket", "true");
-        TICKET_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
-    }
-
-    private static final AppConfigurationEntry TICKET_KERBEROS_LOGIN =
-        new AppConfigurationEntry(getKrb5LoginModuleName(),
-            AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL,
-            TICKET_KERBEROS_OPTIONS);
-
-
-    public static Subject loginUsingTicketCache(
-        String principal, File cacheFile) throws IOException {
-        Set<Principal> principals = new HashSet<Principal>();
-        principals.add(new KerberosPrincipal(principal));
-
-        Subject subject = new Subject(false, principals,
-            new HashSet<Object>(), new HashSet<Object>());
-
-        Configuration conf = useTicketCache(principal, cacheFile);
-        String confName = "TicketCacheConf";
-        LoginContext loginContext = null;
-        try {
-            loginContext = new LoginContext(confName, subject, null, conf);
-        } catch (LoginException e) {
-            throw new IOException("Failed to create LoginContext: " + e);
-        }
-        try {
-            loginContext.login();
-            LOG.info("Login successful for user "
-                + subject.getPrincipals().iterator().next().getName());
-        } catch (LoginException e) {
-            throw new IOException("Login failure for " + e);
-        }
-        return loginContext.getSubject();
-    }
-
-    public static Subject loginUsingKeytab(
-        String principal, File keytabFile) throws IOException {
-        Set<Principal> principals = new HashSet<Principal>();
-        principals.add(new KerberosPrincipal(principal));
-
-        Subject subject = new Subject(false, principals,
-            new HashSet<Object>(), new HashSet<Object>());
-
-        Configuration conf = useKeytab(principal, keytabFile);
-        String confName = "KeytabConf";
-        LoginContext loginContext = null;
-        try {
-            loginContext = new LoginContext(confName, subject, null, conf);
-        } catch (LoginException e) {
-            throw new IOException("Fail to create LoginContext for " + e);
-        }
-        try {
-            loginContext.login();
-             LOG.info("Login successful for user "
-                + subject.getPrincipals().iterator().next().getName());
-        } catch (LoginException e) {
-            throw new IOException("Login failure for " + e);
-        }
-        return loginContext.getSubject();
-    }
-
-    public static LoginContext loginUsingKeytabReturnContext(
-        String principal, File keytabFile) throws IOException {
-        Set<Principal> principals = new HashSet<Principal>();
-        principals.add(new KerberosPrincipal(principal));
-
-        Subject subject = new Subject(false, principals,
-            new HashSet<Object>(), new HashSet<Object>());
-
-        Configuration conf = useKeytab(principal, keytabFile);
-        String confName = "KeytabConf";
-        LoginContext loginContext = null;
-        try {
-            loginContext = new LoginContext(confName, subject, null, conf);
-        } catch (LoginException e) {
-            throw new IOException("Fail to create LoginContext for " + e);
-        }
-        try {
-            loginContext.login();
-            LOG.info("Login successful for user "
-                + subject.getPrincipals().iterator().next().getName());
-        } catch (LoginException e) {
-            throw new IOException("Login failure for " + e);
-        }
-        return loginContext;
-    }
-
-    public static Configuration useTicketCache(String principal,
-                                               File credentialFile) {
-        return new TicketCacheJaasConf(principal, credentialFile);
-    }
-
-    public static Configuration useKeytab(String principal, File keytabFile) {
-        return new KeytabJaasConf(principal, keytabFile);
-    }
-
-    static class TicketCacheJaasConf extends Configuration {
-        private String principal;
-        private File clientCredentialFile;
-
-        TicketCacheJaasConf(String principal, File clientCredentialFile) {
-            this.principal = principal;
-            this.clientCredentialFile = clientCredentialFile;
-        }
-
-        @Override
-        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-            Map<String, String> options = new HashMap<String, String>();
-            options.put("principal", principal);
-            options.put("storeKey", "false");
-            options.put("doNotPrompt", "false");
-            options.put("useTicketCache", "true");
-            options.put("renewTGT", "true");
-            options.put("refreshKrb5Config", "true");
-            options.put("isInitiator", "true");
-            options.put("ticketCache", clientCredentialFile.getAbsolutePath());
-            options.putAll(BASIC_JAAS_OPTIONS);
-
-            return new AppConfigurationEntry[]{
-                new AppConfigurationEntry(getKrb5LoginModuleName(),
-                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-                    options)};
-        }
-    }
-
-    static class KeytabJaasConf extends Configuration {
-        private String principal;
-        private File keytabFile;
-
-        KeytabJaasConf(String principal, File keytab) {
-            this.principal = principal;
-            this.keytabFile = keytab;
-        }
-
-        @Override
-        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-            Map<String, String> options = new HashMap<String, String>();
-            options.put("keyTab", keytabFile.getAbsolutePath());
-            options.put("principal", principal);
-            options.put("useKeyTab", "true");
-            options.put("storeKey", "true");
-            options.put("doNotPrompt", "true");
-            options.put("renewTGT", "false");
-            options.put("refreshKrb5Config", "true");
-            options.put("isInitiator", "true");
-            options.putAll(BASIC_JAAS_OPTIONS);
-
-            return new AppConfigurationEntry[]{
-                new AppConfigurationEntry(getKrb5LoginModuleName(),
-                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-                    options)};
-        }
-    }
-}
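
A short sketch of how the utility above was typically driven: hand it the HAS server URL and get back a JAAS Subject logged in through the HasLoginModule (the URL is a placeholder):

import org.apache.hadoop.has.common.util.HasJaasLoginUtil;

import javax.security.auth.Subject;

public final class JaasLoginExample {
    public static void main(String[] args) throws Exception {
        // Placeholder HAS server URL, passed through to the login module as "hadoopSecurityHas".
        Subject subject = HasJaasLoginUtil.loginUserFromTgtTicket(
            "https://has.example.com:9870/has/v1?auth_type=example");
        System.out.println("Logged in as: "
            + subject.getPrincipals().iterator().next().getName());
    }
}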

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasUtil.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasUtil.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasUtil.java
deleted file mode 100644
index 1d9f4b7..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasUtil.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.common.util;
-
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.crypto.EncryptionHandler;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.PrintStream;
-
-public class HasUtil {
-
-    public static EncryptionKey getClientKey(String userName, String passPhrase,
-                                             EncryptionType type) throws KrbException {
-        EncryptionKey clientKey = EncryptionHandler.string2Key(userName,
-            passPhrase, type);
-        return clientKey;
-    }
-
-    /**
-     * Get the HAS configuration.
-     * @param hasConfFile the configuration file
-     * @return the HAS configuration, or null if the file does not exist
-     * @throws HasException e
-     */
-    public static HasConfig getHasConfig(File hasConfFile) throws HasException {
-
-        if (hasConfFile.exists()) {
-            HasConfig hasConfig = new HasConfig();
-            try {
-                hasConfig.addIniConfig(hasConfFile);
-            } catch (IOException e) {
-                throw new HasException("Can not load the has configuration file "
-                    + hasConfFile.getAbsolutePath());
-            }
-            return hasConfig;
-        }
-
-        return null;
-    }
-
-    public static void setEnableConf(File hasConfFile, String value)
-            throws HasException, IOException {
-        String oldValue = getHasConfig(hasConfFile).getEnableConf();
-        if (oldValue == null) {
-            throw new HasException("Please set enable_conf in has-server.conf.");
-        }
-        if (oldValue.equals(value)) {
-            return;
-        }
-        try {
-            BufferedReader bf = new BufferedReader(new FileReader(hasConfFile));
-            StringBuilder sb = new StringBuilder();
-            String tempString;
-            while ((tempString = bf.readLine()) != null) {
-                if (tempString.trim().startsWith("enable_conf")) {
-                    tempString = tempString.replace(oldValue, value);
-                }
-                sb.append(tempString + "\n");
-            }
-            PrintStream ps = new PrintStream(new FileOutputStream(hasConfFile));
-            ps.print(sb.toString());
-            bf.close();
-        } catch (FileNotFoundException e) {
-            throw new HasException("Can not load the has configuration file "
-                    + hasConfFile.getAbsolutePath());
-        }
-    }
-}
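
Note that getHasConfig returns null when the file is missing, so callers need to check the result. A small sketch under that assumption (the path is the default client config location used by HasClient):

import org.apache.hadoop.has.common.HasConfig;
import org.apache.hadoop.has.common.util.HasUtil;

import java.io.File;

public final class LoadHasConfigExample {
    public static void main(String[] args) throws Exception {
        File confFile = new File("/etc/has/has-client.conf");
        HasConfig config = HasUtil.getHasConfig(confFile);
        if (config == null) {
            System.err.println("Config file not found: " + confFile.getAbsolutePath());
            return;
        }
        System.out.println("HTTPS host: " + config.getHttpsHost());
    }
}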

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/util/PlatformName.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/PlatformName.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/PlatformName.java
deleted file mode 100644
index 6f64c62..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/PlatformName.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.has.common.util;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * Borrow the class from Apache Hadoop
- */
-
-/**
- * A helper class for getting build-info of the java-vm.
- *
- */
-@InterfaceAudience.LimitedPrivate({"HBase"})
-@InterfaceStability.Unstable
-public class PlatformName {
-  /**
-   * The complete platform 'name' to identify the platform as
-   * per the java-vm.
-   */
-  public static final String PLATFORM_NAME =
-      (System.getProperty("os.name").startsWith("Windows")
-      ? System.getenv("os") : System.getProperty("os.name"))
-      + "-" + System.getProperty("os.arch")
-      + "-" + System.getProperty("sun.arch.data.model");
-
-  /**
-   * The java vendor name used in this platform.
-   */
-  public static final String JAVA_VENDOR_NAME = System.getProperty("java.vendor");
-
-  /**
-   * A public static variable to indicate the current java vendor is
-   * IBM java or not.
-   */
-  public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM");
-
-  public static void main(String[] args) {
-    System.out.println(PLATFORM_NAME);
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/util/StringUtils.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/StringUtils.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/StringUtils.java
deleted file mode 100644
index 2b00904..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/StringUtils.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.has.common.util;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-import java.util.Locale;
-
-/**
- * General string utils
- */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class StringUtils {
-
-  /**
-   * Converts all of the characters in this String to lower case with
-   * Locale.ENGLISH.
-   *
-   * @param str  string to be converted
-   * @return     the str, converted to lowercase.
-   */
-  public static String toLowerCase(String str) {
-    return str.toLowerCase(Locale.ENGLISH);
-  }
-
-  /**
-   * Converts all of the characters in this String to upper case with
-   * Locale.ENGLISH.
-   *
-   * @param str  string to be converted
-   * @return     the str, converted to uppercase.
-   */
-  public static String toUpperCase(String str) {
-    return str.toUpperCase(Locale.ENGLISH);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/util/URLConnectionFactory.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/URLConnectionFactory.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/URLConnectionFactory.java
deleted file mode 100644
index a818864..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/URLConnectionFactory.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.has.common.util;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.spnego.AuthenticatedURL;
-import org.apache.hadoop.has.common.spnego.AuthenticationException;
-import org.apache.hadoop.has.common.spnego.KerberosHasAuthenticator;
-import org.apache.hadoop.has.common.ssl.SSLFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.net.ssl.HostnameVerifier;
-import javax.net.ssl.HttpsURLConnection;
-import javax.net.ssl.SSLSocketFactory;
-import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.net.URLConnection;
-import java.security.GeneralSecurityException;
-
-/**
- * Borrow the class from Apache Hadoop
- */
-
-/**
- * Utilities for handling URLs
- */
-@InterfaceStability.Unstable
-public class URLConnectionFactory {
-  private static final Logger LOG = LoggerFactory
-      .getLogger(URLConnectionFactory.class);
-
-  /**
-   * Timeout for socket connects and reads
-   */
-   // 1 minute
-  public static final int DEFAULT_SOCKET_TIMEOUT = 60 * 1000;
-  private final ConnectionConfigurator connConfigurator;
-
-  private static final ConnectionConfigurator DEFAULT_TIMEOUT_CONN_CONFIGURATOR
-      = new ConnectionConfigurator() {
-        @Override
-        public HttpURLConnection configure(HttpURLConnection conn)
-            throws IOException {
-          URLConnectionFactory.setTimeouts(conn,
-                                           DEFAULT_SOCKET_TIMEOUT,
-                                           DEFAULT_SOCKET_TIMEOUT);
-          return conn;
-        }
-      };
-
-  /**
-   * The URLConnectionFactory that sets the default timeout and it only trusts
-   * Java's SSL certificates.
-   */
-  public static final URLConnectionFactory DEFAULT_SYSTEM_CONNECTION_FACTORY =
-      new URLConnectionFactory(DEFAULT_TIMEOUT_CONN_CONFIGURATOR);
-
-  /**
-   * Construct a new URLConnectionFactory based on the configuration. It will
-   * try to load SSL certificates when it is specified.
-   */
-  public static URLConnectionFactory newDefaultURLConnectionFactory(HasConfig conf) {
-    ConnectionConfigurator conn = null;
-    try {
-      conn = newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf);
-    } catch (Exception e) {
-      LOG.debug(
-          "Cannot load customized ssl related configuration. Fallback to system-generic settings.",
-          e);
-      conn = DEFAULT_TIMEOUT_CONN_CONFIGURATOR;
-    }
-    return new URLConnectionFactory(conn);
-  }
-
-  private static ConnectionConfigurator getSSLConnectionConfiguration(
-      HasConfig conf) {
-    ConnectionConfigurator conn;
-    try {
-      conn = newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf);
-    } catch (Exception e) {
-      LOG.warn(
-          "Cannot load customized ssl related configuration. Fallback to"
-              + " system-generic settings.",
-          e);
-      conn = DEFAULT_TIMEOUT_CONN_CONFIGURATOR;
-    }
-
-    return conn;
-  }
-
-  @VisibleForTesting
-  URLConnectionFactory(ConnectionConfigurator connConfigurator) {
-    this.connConfigurator = connConfigurator;
-  }
-
-  /**
-   * Create a new ConnectionConfigurator for SSL connections
-   */
-  private static ConnectionConfigurator newSslConnConfigurator(
-      final int defaultTimeout, HasConfig conf)
-      throws IOException, GeneralSecurityException, HasException {
-    final SSLFactory factory;
-    final SSLSocketFactory sf;
-    final HostnameVerifier hv;
-    final int connectTimeout;
-    final int readTimeout;
-
-    factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
-    factory.init();
-    sf = factory.createSSLSocketFactory();
-    hv = factory.getHostnameVerifier();
-
-    connectTimeout = defaultTimeout;
-
-    readTimeout = defaultTimeout;
-
-    return new ConnectionConfigurator() {
-      @Override
-      public HttpURLConnection configure(HttpURLConnection conn)
-          throws IOException {
-        if (conn instanceof HttpsURLConnection) {
-          HttpsURLConnection c = (HttpsURLConnection) conn;
-          c.setSSLSocketFactory(sf);
-          c.setHostnameVerifier(hv);
-        }
-        URLConnectionFactory.setTimeouts(conn, connectTimeout, readTimeout);
-        return conn;
-      }
-    };
-  }
-
-  /**
-   * Opens a url with read and connect timeouts
-   *
-   * @param url
-   *          to open
-   * @return URLConnection
-   * @throws IOException
-   */
-  public URLConnection openConnection(URL url) throws IOException {
-    try {
-      return openConnection(url, false, null);
-    } catch (AuthenticationException e) {
-      // Unreachable
-      LOG.error("Open connection {} failed", url, e);
-      return null;
-    }
-  }
-
-  /**
-   * Opens a url with read and connect timeouts
-   *
-   * @param url
-   *          URL to open
-   * @param isSpnego
-   *          whether the url should be authenticated via SPNEGO
-   * @return URLConnection
-   * @throws IOException
-   * @throws AuthenticationException
-   */
-  public URLConnection openConnection(URL url, boolean isSpnego, HasConfig hasConfig)
-      throws IOException, AuthenticationException {
-    if (isSpnego && (hasConfig != null)) {
-      LOG.debug("open AuthenticatedURL connection {}", url);
-//      UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab();
-      final AuthenticatedURL.Token authToken = new AuthenticatedURL.Token();
-      return new AuthenticatedURL(new KerberosHasAuthenticator(hasConfig.getAdminKeytab(),
-          hasConfig.getAdminKeytabPrincipal()),
-          connConfigurator).openConnection(url, authToken);
-    } else {
-      LOG.debug("open URL connection");
-      URLConnection connection = url.openConnection();
-      if (connection instanceof HttpURLConnection) {
-        connConfigurator.configure((HttpURLConnection) connection);
-      }
-      return connection;
-    }
-  }
-
-  /**
-   * Sets timeout parameters on the given URLConnection.
-   *
-   * @param connection
-   *          URLConnection to set
-   * @param connectTimeout
-   *          the connection and read timeout of the connection.
-   */
-  private static void setTimeouts(URLConnection connection,
-                                  int connectTimeout,
-                                  int readTimeout) {
-    connection.setConnectTimeout(connectTimeout);
-    connection.setReadTimeout(readTimeout);
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/HasAdmin.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/HasAdmin.java b/has/has-common/src/main/java/org/apache/kerby/has/common/HasAdmin.java
new file mode 100644
index 0000000..30b1e35
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/HasAdmin.java
@@ -0,0 +1,140 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License. 
+ *
+ */
+package org.apache.kerby.has.common;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ * Server-side admin facilities accessed remotely, similar to MIT kadmin in remote mode.
+ */
+public interface HasAdmin {
+
+    /**
+     * Get the hadmin principal name.
+     *
+     * @return The hadmin principal name.
+     */
+    String getHadminPrincipal();
+
+    /**
+     * Add principal to backend.
+     *
+     * @param principal The principal to be added into backend
+     * @throws HasException e
+     */
+    void addPrincipal(String principal) throws HasException;
+
+    /**
+     * Add principal to backend.
+     *
+     * @param principal The principal to be added into backend
+     * @param password  The password to create encryption key
+     * @throws HasException e
+     */
+    void addPrincipal(String principal, String password) throws HasException;
+
+    /**
+     * Export all the keys of the specified principal into the specified keytab
+     * file.
+     *
+     * @param keytabFile The keytab file
+     * @param principal The principal name
+     * @throws HasException e
+     */
+    void exportKeytab(File keytabFile, String principal) throws HasException;
+
+    /**
+     * Export all the keys of the specified principals into the specified keytab
+     * file.
+     *
+     * @param keytabFile The keytab file
+     * @param principals The principal names
+     * @throws HasException e
+     */
+    void exportKeytab(File keytabFile, List<String> principals) throws HasException;
+
+    /**
+     * Delete the principal from the backend.
+     *
+     * @param principal The principal to be deleted from backend
+     * @throws HasException e
+     */
+    void deletePrincipal(String principal) throws HasException;
+
+    /**
+     * Rename the principal.
+     *
+     * @param oldPrincipalName The original principal name
+     * @param newPrincipalName The new principal name
+     * @throws HasException e
+     */
+    void renamePrincipal(String oldPrincipalName,
+                         String newPrincipalName) throws HasException;
+
+    /**
+     * Get all the principal names from backend.
+     *
+     * @return principal list
+     * @throws HasException e
+     */
+    List<String> getPrincipals() throws HasException;
+
+    /**
+     * Get all the principal names that match the given pattern.
+     *
+     * @param globString The glob string for matching
+     * @return Principal names
+     * @throws HasException e
+     */
+    List<String> getPrincipals(String globString) throws HasException;
+
+    /**
+     * Change the password of specified principal.
+     *
+     * @param principal The principal whose password is to be updated
+     * @param newPassword The new password
+     * @throws HasException e
+     */
+//    void changePassword(String principal, String newPassword) throws HasException;
+
+    /**
+     * Update the random keys of specified principal.
+     *
+     * @param principal The principal whose keys are to be updated
+     * @throws HasException e
+     */
+//    void updateKeys(String principal) throws HasException;
+
+    /**
+     * Release any associated resources.
+     *
+     * @throws HasException e
+     */
+//    void release() throws HasException;
+
+    String addPrincByRole(String host, String role) throws HasException;
+
+    File getKeytabByHostAndRole(String host, String role) throws HasException;
+
+    int size() throws HasException;
+
+    void setEnableOfConf(String isEnable) throws HasException;
+}
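For reference, a minimal usage sketch for the HasAdmin interface above; the hadmin variable stands in for any concrete HasAdmin implementation, and the principal names and keytab path are illustrative only:

    import org.apache.kerby.has.common.HasAdmin;
    import org.apache.kerby.has.common.HasException;

    import java.io.File;
    import java.util.List;

    public class HasAdminSketch {
        // 'hadmin' is assumed to be obtained from a concrete HasAdmin implementation.
        static void demo(HasAdmin hadmin) throws HasException {
            // Create a principal with a password-derived key.
            hadmin.addPrincipal("alice@EXAMPLE.COM", "secret");

            // Export its keys into a keytab file.
            hadmin.exportKeytab(new File("/tmp/alice.keytab"), "alice@EXAMPLE.COM");

            // List everything currently stored in the backend.
            List<String> principals = hadmin.getPrincipals();
            System.out.println("principals: " + principals);

            // Rename the principal, then remove it again.
            hadmin.renamePrincipal("alice@EXAMPLE.COM", "bob@EXAMPLE.COM");
            hadmin.deletePrincipal("bob@EXAMPLE.COM");
        }
    }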

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/HasConfig.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/HasConfig.java b/has/has-common/src/main/java/org/apache/kerby/has/common/HasConfig.java
new file mode 100644
index 0000000..e0f3f1e
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/HasConfig.java
@@ -0,0 +1,103 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.common;
+
+import org.apache.kerby.kerberos.kerb.common.Krb5Conf;
+
+import java.io.File;
+
+/**
+ * HAS configuration API.
+ */
+public class HasConfig extends Krb5Conf {
+    private File confDir;
+
+    public void setConfDir(File dir) {
+        this.confDir = dir;
+    }
+
+    public File getConfDir() {
+        return confDir;
+    }
+
+    public String getHttpsHost() {
+        return getString(HasConfigKey.HTTPS_HOST, false, "HAS");
+    }
+
+    public String getHttpsPort() {
+        return getString(HasConfigKey.HTTPS_PORT, false, "HAS");
+    }
+
+    public String getHttpHost() {
+        return getString(HasConfigKey.HTTP_HOST, false, "HAS");
+    }
+
+    public String getHttpPort() {
+        return getString(HasConfigKey.HTTP_PORT, false, "HAS");
+    }
+
+    public String getPluginName() {
+        return getString(HasConfigKey.AUTH_TYPE, true, "PLUGIN");
+    }
+
+    public String getRealm() {
+        return getString(HasConfigKey.REALM, false, "HAS");
+    }
+
+    public String getSslServerConf() {
+        return getString(HasConfigKey.SSL_SERVER_CONF, true, "HAS");
+    }
+
+    public String getSslClientConf() {
+        return getString(HasConfigKey.SSL_CLIENT_CONF, true, "HAS");
+    }
+
+    public String getFilterAuthType() {
+        return getString(HasConfigKey.FILTER_AUTH_TYPE, true, "HAS");
+    }
+
+    public String getKerberosPrincipal() {
+        return getString(HasConfigKey.KERBEROS_PRINCIPAL, false, "HAS");
+    }
+
+    public String getKerberosKeytab() {
+        return getString(HasConfigKey.KERBEROS_KEYTAB, false, "HAS");
+    }
+
+    public String getKerberosNameRules() {
+        return getString(HasConfigKey.KERBEROS_NAME_RULES, false, "HAS");
+    }
+
+    public String getAdminKeytab() {
+        return getString(HasConfigKey.ADMIN_KEYTAB, false, "HAS");
+    }
+
+    public String getAdminKeytabPrincipal() {
+        return getString(HasConfigKey.ADMIN_KEYTAB_PRINCIPAL, false, "HAS");
+    }
+
+    public String getEnableConf() {
+        return getString(HasConfigKey.ENABLE_CONF, false, "HAS");
+    }
+
+    public String getSslClientCert() {
+        return getString(HasConfigKey.SSL_CLIENT_CERT, true, "HAS");
+    }
+}
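A short sketch of how the getters above might be consumed, for example to build the admin server address; how the HasConfig instance gets populated is assumed to happen elsewhere, and the third argument of getString() is taken to be the configuration section name:

    import org.apache.kerby.has.common.HasConfig;

    public class HasConfigSketch {
        // 'conf' is assumed to be a HasConfig that has already been loaded.
        static String adminBaseUrl(HasConfig conf) {
            // https_host and https_port come from the HAS section of the configuration.
            return "https://" + conf.getHttpsHost() + ":" + conf.getHttpsPort();
        }
    }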

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/HasConfigKey.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/HasConfigKey.java b/has/has-common/src/main/java/org/apache/kerby/has/common/HasConfigKey.java
new file mode 100644
index 0000000..272ab0e
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/HasConfigKey.java
@@ -0,0 +1,61 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.common;
+
+import org.apache.kerby.config.ConfigKey;
+
+public enum HasConfigKey implements ConfigKey {
+    HTTP_HOST,
+    HTTP_PORT,
+    HTTPS_HOST,
+    HTTPS_PORT,
+    AUTH_TYPE("RAM"),
+    REALM,
+    ENABLE_CONF,
+    SSL_SERVER_CONF("/etc/has/ssl-server.conf"),
+    SSL_CLIENT_CONF("/etc/has/ssl-client.conf"),
+    SSL_CLIENT_CERT("/etc/has/cert-signed"),
+    FILTER_AUTH_TYPE("kerberos"),
+    KERBEROS_PRINCIPAL,
+    KERBEROS_KEYTAB,
+    KERBEROS_NAME_RULES,
+    ADMIN_KEYTAB,
+    ADMIN_KEYTAB_PRINCIPAL;
+
+    private Object defaultValue;
+
+    HasConfigKey() {
+        this.defaultValue = null;
+    }
+
+    HasConfigKey(Object defaultValue) {
+        this.defaultValue = defaultValue;
+    }
+
+    @Override
+    public String getPropertyKey() {
+        return name().toLowerCase();
+    }
+
+    @Override
+    public Object getDefaultValue() {
+        return this.defaultValue;
+    }
+}
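Since getPropertyKey() simply lower-cases the enum constant name, the mapping between constants and property keys is mechanical; a tiny sketch that prints the full mapping:

    import org.apache.kerby.has.common.HasConfigKey;

    public class HasConfigKeySketch {
        public static void main(String[] args) {
            for (HasConfigKey key : HasConfigKey.values()) {
                // e.g. HTTPS_PORT -> "https_port" (no default), AUTH_TYPE -> "auth_type" (default "RAM")
                System.out.println(key.getPropertyKey() + " -> default: " + key.getDefaultValue());
            }
        }
    }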

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/HasException.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/HasException.java b/has/has-common/src/main/java/org/apache/kerby/has/common/HasException.java
new file mode 100644
index 0000000..9e5db44
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/HasException.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kerby.has.common;
+
+public class HasException extends Exception {
+
+    private static final long serialVersionUID = -1916788959202646914L;
+
+    /**
+     * Creates an {@link HasException}.
+     *
+     * @param cause original exception.
+     */
+    public HasException(Throwable cause) {
+        super(cause);
+    }
+
+    /**
+     * Creates an {@link HasException}.
+     *
+     * @param message exception message.
+     */
+    public HasException(String message) {
+        super(message);
+    }
+
+    /**
+     * Creates an {@link HasException}.
+     *
+     * @param message exception message.
+     * @param cause   original exception.
+     */
+    public HasException(String message, Throwable cause) {
+        super(message, cause);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/AuthToken.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/AuthToken.java b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/AuthToken.java
new file mode 100644
index 0000000..bacc740
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/AuthToken.java
@@ -0,0 +1,217 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.kerby.has.common.spnego;
+
+import java.security.Principal;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.StringTokenizer;
+
+/**
+ * Borrow the class from Apache Hadoop
+ */
+public class AuthToken implements Principal {
+
+  /*
+   * Attribute keys used in the token's string form.
+   */
+  private static final String ATTR_SEPARATOR = "&";
+  private static final String USER_NAME = "u";
+  private static final String PRINCIPAL = "p";
+  private static final String EXPIRES = "e";
+  private static final String TYPE = "t";
+
+  private static final Set<String> ATTRIBUTES =
+    new HashSet<String>(Arrays.asList(USER_NAME, PRINCIPAL, EXPIRES, TYPE));
+
+  private String userName;
+  private String principal;
+  private String type;
+  private long expires;
+  private String tokenStr;
+
+  protected AuthToken() {
+    userName = null;
+    principal = null;
+    type = null;
+    expires = -1;
+    tokenStr = "ANONYMOUS";
+    generateToken();
+  }
+
+  private static final String ILLEGAL_ARG_MSG = " is NULL, empty or contains a '" + ATTR_SEPARATOR + "'";
+
+  /**
+   * Creates an authentication token.
+   *
+   * @param userName user name.
+   * @param principal principal (commonly matches the user name; with Kerberos it is the full/long principal
+   * name while the userName is the short name).
+   * @param type the authentication mechanism name
+   * (for example "kerberos").
+   */
+  public AuthToken(String userName, String principal, String type) {
+    checkForIllegalArgument(userName, "userName");
+    checkForIllegalArgument(principal, "principal");
+    checkForIllegalArgument(type, "type");
+    this.userName = userName;
+    this.principal = principal;
+    this.type = type;
+    this.expires = -1;
+  }
+  
+  /**
+   * Check if the provided value is invalid. Throw an error if it is invalid, NOP otherwise.
+   * 
+   * @param value the value to check.
+   * @param name the parameter name to use in an error message if the value is invalid.
+   */
+  protected static void checkForIllegalArgument(String value, String name) {
+    if (value == null || value.length() == 0 || value.contains(ATTR_SEPARATOR)) {
+      throw new IllegalArgumentException(name + ILLEGAL_ARG_MSG);
+    }
+  }
+
+  /**
+   * Sets the expiration of the token.
+   *
+   * @param expires expiration time of the token in milliseconds since the epoch.
+   */
+  public void setExpires(long expires) {
+    this.expires = expires;
+    generateToken();
+  }
+
+  /**
+   * Returns true if the token has expired.
+   *
+   * @return true if the token has expired.
+   */
+  public boolean isExpired() {
+    return getExpires() != -1 && System.currentTimeMillis() > getExpires();
+  }
+
+  /**
+   * Generates the token.
+   */
+  private void generateToken() {
+    StringBuffer sb = new StringBuffer();
+    sb.append(USER_NAME).append("=").append(getUserName()).append(ATTR_SEPARATOR);
+    sb.append(PRINCIPAL).append("=").append(getName()).append(ATTR_SEPARATOR);
+    sb.append(TYPE).append("=").append(getType()).append(ATTR_SEPARATOR);
+    sb.append(EXPIRES).append("=").append(getExpires());
+    tokenStr = sb.toString();
+  }
+
+  /**
+   * Returns the user name.
+   *
+   * @return the user name.
+   */
+  public String getUserName() {
+    return userName;
+  }
+
+  /**
+   * Returns the principal name (this method name comes from the JDK {@link Principal} interface).
+   *
+   * @return the principal name.
+   */
+  @Override
+  public String getName() {
+    return principal;
+  }
+
+  /**
+   * Returns the authentication mechanism of the token.
+   *
+   * @return the authentication mechanism of the token.
+   */
+  public String getType() {
+    return type;
+  }
+
+  /**
+   * Returns the expiration time of the token.
+   *
+   * @return the expiration time of the token, in milliseconds since the epoch.
+   */
+  public long getExpires() {
+    return expires;
+  }
+
+  /**
+   * Returns the string representation of the token.
+   * <p>
+   * This string representation is parseable by the {@link #parse} method.
+   *
+   * @return the string representation of the token.
+   */
+  @Override
+  public String toString() {
+    return tokenStr;
+  }
+
+  public static AuthToken parse(String tokenStr) throws AuthenticationException {
+    if (tokenStr.length() >= 2) {
+      // strip the \" at the two ends of the tokenStr
+      if (tokenStr.charAt(0) == '\"'
+          && tokenStr.charAt(tokenStr.length() - 1) == '\"') {
+        tokenStr = tokenStr.substring(1, tokenStr.length() - 1);
+      }
+    }
+    Map<String, String> map = split(tokenStr);
+    // remove the signature part, since client doesn't care about it
+    map.remove("s");
+
+    if (!map.keySet().equals(ATTRIBUTES)) {
+      throw new AuthenticationException("Invalid token string, missing attributes");
+    }
+    long expires = Long.parseLong(map.get(EXPIRES));
+    AuthToken token = new AuthToken(map.get(USER_NAME), map.get(PRINCIPAL), map.get(TYPE));
+    token.setExpires(expires);
+    return token;
+  }
+
+  /**
+   * Splits the string representation of a token into attributes pairs.
+   *
+   * @param tokenStr string representation of a token.
+   *
+   * @return a map with the attribute pairs of the token.
+   *
+   * @throws AuthenticationException thrown if the string representation of the token could not be broken into
+   * attribute pairs.
+   */
+  private static Map<String, String> split(String tokenStr) throws AuthenticationException {
+    Map<String, String> map = new HashMap<String, String>();
+    StringTokenizer st = new StringTokenizer(tokenStr, ATTR_SEPARATOR);
+    while (st.hasMoreTokens()) {
+      String part = st.nextToken();
+      int separator = part.indexOf('=');
+      if (separator == -1) {
+        throw new AuthenticationException("Invalid authentication token");
+      }
+      String key = part.substring(0, separator);
+      String value = part.substring(separator + 1);
+      map.put(key, value);
+    }
+    return map;
+  }
+
+}
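A round-trip sketch for the token string format produced by generateToken() and consumed by parse(); the user name, principal and lifetime are illustrative:

    import org.apache.kerby.has.common.spnego.AuthToken;
    import org.apache.kerby.has.common.spnego.AuthenticationException;

    public class AuthTokenSketch {
        public static void main(String[] args) throws AuthenticationException {
            AuthToken token = new AuthToken("alice", "alice@EXAMPLE.COM", "kerberos");
            token.setExpires(System.currentTimeMillis() + 60 * 1000);

            // Serialized form: u=alice&p=alice@EXAMPLE.COM&t=kerberos&e=<expiry millis>
            String wire = token.toString();

            AuthToken parsed = AuthToken.parse(wire);
            System.out.println(parsed.getName() + ", expired=" + parsed.isExpired());
        }
    }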

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/AuthenticatedURL.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/AuthenticatedURL.java b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/AuthenticatedURL.java
new file mode 100644
index 0000000..372c5cd
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/AuthenticatedURL.java
@@ -0,0 +1,282 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.kerby.has.common.spnego;
+
+import org.apache.kerby.has.common.util.ConnectionConfigurator;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Borrow the class from Apache Hadoop
+ */
+
+/**
+ * <p>
+ * The authentication mechanisms supported by default are Hadoop Simple authentication
+ * (also known as pseudo authentication) and Kerberos SPNEGO authentication.
+ * <p>
+ * Additional authentication mechanisms can be supported via {@link Authenticator} implementations.
+ * <p>
+ * The default {@link Authenticator} is the {@link KerberosAuthenticator} class which supports
+ * automatic fallback from Kerberos SPNEGO to Hadoop Simple authentication.
+ * <p>
+ * <code>AuthenticatedURL</code> instances are not thread-safe.
+ * <p>
+ * The usage pattern of the {@link AuthenticatedURL} is:
+ * <pre>
+ *
+ * // establishing an initial connection
+ *
+ * URL url = new URL("http://foo:8080/bar");
+ * AuthenticatedURL.Token token = new AuthenticatedURL.Token();
+ * AuthenticatedURL aUrl = new AuthenticatedURL();
+ * HttpURLConnection conn = aUrl.openConnection(url, token);
+ * ....
+ * // use the 'conn' instance
+ * ....
+ *
+ * // establishing a follow up connection using a token from the previous connection
+ *
+ * HttpURLConnection conn = aUrl.openConnection(url, token);
+ * ....
+ * // use the 'conn' instance
+ * ....
+ *
+ * </pre>
+ */
+public class AuthenticatedURL {
+
+  /**
+   * Name of the HTTP cookie used for the authentication token between the client and the server.
+   */
+  public static final String AUTH_COOKIE = "hadoop.auth";
+
+  private static final String AUTH_COOKIE_EQ = AUTH_COOKIE + "=";
+
+  /**
+   * Client side authentication token.
+   */
+  public static class Token {
+
+    private String token;
+
+    /**
+     * Creates a token.
+     */
+    public Token() {
+    }
+
+    /**
+     * Creates a token using an existing string representation of the token.
+     *
+     * @param tokenStr string representation of the tokenStr.
+     */
+    public Token(String tokenStr) {
+      if (tokenStr == null) {
+        throw new IllegalArgumentException("tokenStr cannot be null");
+      }
+      set(tokenStr);
+    }
+
+    /**
+     * Returns if a token from the server has been set.
+     *
+     * @return if a token from the server has been set.
+     */
+    public boolean isSet() {
+      return token != null;
+    }
+
+    /**
+     * Sets a token.
+     *
+     * @param tokenStr string representation of the tokenStr.
+     */
+    void set(String tokenStr) {
+      token = tokenStr;
+    }
+
+    /**
+     * Returns the string representation of the token.
+     *
+     * @return the string representation of the token.
+     */
+    @Override
+    public String toString() {
+      return token;
+    }
+
+  }
+
+  private static Class<? extends Authenticator> defaultAuthenticator
+      = KerberosAuthenticator.class;
+
+  /**
+   * Sets the default {@link Authenticator} class to use when an {@link AuthenticatedURL} instance
+   * is created without specifying an authenticator.
+   *
+   * @param authenticator the authenticator class to use as default.
+   */
+  public static void setDefaultAuthenticator(Class<? extends Authenticator> authenticator) {
+    defaultAuthenticator = authenticator;
+  }
+
+  /**
+   * Returns the default {@link Authenticator} class to use when an {@link AuthenticatedURL} instance
+   * is created without specifying an authenticator.
+   *
+   * @return the authenticator class to use as default.
+   */
+  public static Class<? extends Authenticator> getDefaultAuthenticator() {
+    return defaultAuthenticator;
+  }
+
+  private Authenticator authenticator;
+  private ConnectionConfigurator connConfigurator;
+
+  /**
+   * Creates an {@link AuthenticatedURL}.
+   */
+  public AuthenticatedURL() {
+    this(null);
+  }
+
+  /**
+   * Creates an <code>AuthenticatedURL</code>.
+   *
+   * @param authenticator the {@link Authenticator} instance to use, if <code>null</code> a {@link
+   * KerberosAuthenticator} is used.
+   */
+  public AuthenticatedURL(Authenticator authenticator) {
+    this(authenticator, null);
+  }
+
+  /**
+   * Creates an <code>AuthenticatedURL</code>.
+   *
+   * @param authenticator the {@link Authenticator} instance to use, if <code>null</code> a {@link
+   * KerberosAuthenticator} is used.
+   * @param connConfigurator a connection configurator.
+   */
+  public AuthenticatedURL(Authenticator authenticator,
+                          ConnectionConfigurator connConfigurator) {
+    try {
+      this.authenticator = (authenticator != null) ? authenticator : defaultAuthenticator.newInstance();
+    } catch (Exception ex) {
+      throw new RuntimeException(ex);
+    }
+    this.connConfigurator = connConfigurator;
+    this.authenticator.setConnectionConfigurator(connConfigurator);
+  }
+
+  /**
+   * Returns the {@link Authenticator} instance used by the
+   * <code>AuthenticatedURL</code>.
+   *
+   * @return the {@link Authenticator} instance
+   */
+  protected Authenticator getAuthenticator() {
+    return authenticator;
+  }
+
+  /**
+   * Returns an authenticated {@link HttpURLConnection}.
+   *
+   * @param url the URL to connect to. Only HTTP/S URLs are supported.
+   * @param token the authentication token being used for the user.
+   *
+   * @return an authenticated {@link HttpURLConnection}.
+   *
+   * @throws IOException if an IO error occurred.
+   * @throws AuthenticationException if an authentication exception occurred.
+   */
+  public HttpURLConnection openConnection(URL url, Token token) throws IOException, AuthenticationException {
+    if (url == null) {
+      throw new IllegalArgumentException("url cannot be NULL");
+    }
+    if (!url.getProtocol().equalsIgnoreCase("http") && !url.getProtocol().equalsIgnoreCase("https")) {
+      throw new IllegalArgumentException("url must be for a HTTP or HTTPS resource");
+    }
+    if (token == null) {
+      throw new IllegalArgumentException("token cannot be NULL");
+    }
+    authenticator.authenticate(url, token);
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    if (connConfigurator != null) {
+      conn = connConfigurator.configure(conn);
+    }
+    injectToken(conn, token);
+    return conn;
+  }
+
+  /**
+   * Helper method that injects an authentication token to send with a connection.
+   *
+   * @param conn connection to inject the authentication token into.
+   * @param token authentication token to inject.
+   */
+  public static void injectToken(HttpURLConnection conn, Token token) {
+    String t = token.token;
+    if (t != null) {
+      if (!t.startsWith("\"")) {
+        t = "\"" + t + "\"";
+      }
+      conn.addRequestProperty("Cookie", AUTH_COOKIE_EQ + t);
+    }
+  }
+
+  /**
+   * Helper method that extracts an authentication token received from a connection.
+   * <p>
+   * This method is used by {@link Authenticator} implementations.
+   *
+   * @param conn connection to extract the authentication token from.
+   * @param token the authentication token.
+   *
+   * @throws IOException if an IO error occurred.
+   * @throws AuthenticationException if an authentication exception occurred.
+   */
+  public static void extractToken(HttpURLConnection conn, Token token) throws IOException, AuthenticationException {
+    int respCode = conn.getResponseCode();
+    if (respCode == HttpURLConnection.HTTP_OK
+        || respCode == HttpURLConnection.HTTP_CREATED
+        || respCode == HttpURLConnection.HTTP_ACCEPTED) {
+      Map<String, List<String>> headers = conn.getHeaderFields();
+      List<String> cookies = headers.get("Set-Cookie");
+      if (cookies != null) {
+        for (String cookie : cookies) {
+          if (cookie.startsWith(AUTH_COOKIE_EQ)) {
+            String value = cookie.substring(AUTH_COOKIE_EQ.length());
+            int separator = value.indexOf(";");
+            if (separator > -1) {
+              value = value.substring(0, separator);
+            }
+            if (value.length() > 0) {
+              token.set(value);
+            }
+          }
+        }
+      }
+    } else {
+      token.set(null);
+      throw new AuthenticationException("Authentication failed, status: " + conn.getResponseCode()
+          + ", message: " + conn.getResponseMessage());
+    }
+  }
+
+}
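To complement the class javadoc, a concrete sketch of an SPNEGO-authenticated request with token reuse; the endpoint URL is made up and the default KerberosAuthenticator (kinit-style credentials) is assumed:

    import org.apache.kerby.has.common.spnego.AuthenticatedURL;
    import org.apache.kerby.has.common.spnego.AuthenticationException;

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class AuthenticatedUrlSketch {
        public static void main(String[] args) throws IOException, AuthenticationException {
            URL url = new URL("http://has.example.com:8080/has/v1/hostroles");
            AuthenticatedURL.Token token = new AuthenticatedURL.Token();

            // The first request runs the SPNEGO handshake and fills in the token.
            HttpURLConnection conn = new AuthenticatedURL().openConnection(url, token);
            System.out.println("status: " + conn.getResponseCode());

            // Follow-up requests reuse the token, so no handshake is repeated.
            conn = new AuthenticatedURL().openConnection(url, token);
            conn.disconnect();
        }
    }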

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/AuthenticationException.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/AuthenticationException.java b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/AuthenticationException.java
new file mode 100644
index 0000000..38e5f6a
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/AuthenticationException.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.kerby.has.common.spnego;
+
+/**
+ * Borrow the class from Apache Hadoop
+ */
+
+/**
+ * Exception thrown when an authentication error occurs.
+ */
+public class AuthenticationException extends Exception {
+  
+  static final long serialVersionUID = 0;
+
+  /**
+   * Creates an {@link AuthenticationException}.
+   *
+   * @param cause original exception.
+   */
+  public AuthenticationException(Throwable cause) {
+    super(cause);
+  }
+
+  /**
+   * Creates an {@link AuthenticationException}.
+   *
+   * @param msg exception message.
+   */
+  public AuthenticationException(String msg) {
+    super(msg);
+  }
+
+  /**
+   * Creates an {@link AuthenticationException}.
+   *
+   * @param msg exception message.
+   * @param cause original exception.
+   */
+  public AuthenticationException(String msg, Throwable cause) {
+    super(msg, cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/Authenticator.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/Authenticator.java b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/Authenticator.java
new file mode 100644
index 0000000..a643218
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/Authenticator.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.kerby.has.common.spnego;
+
+import org.apache.kerby.has.common.util.ConnectionConfigurator;
+
+import java.io.IOException;
+import java.net.URL;
+
+/**
+ * Borrow the class from Apache Hadoop
+ */
+
+/**
+ * Interface for client authentication mechanisms.
+ * <p>
+ * Implementations are use-once instances, they don't need to be thread safe.
+ */
+public interface Authenticator {
+
+  /**
+   * Sets a {@link ConnectionConfigurator} instance to use for
+   * configuring connections.
+   *
+   * @param configurator the {@link ConnectionConfigurator} instance.
+   */
+  void setConnectionConfigurator(ConnectionConfigurator configurator);
+
+  /**
+   * Authenticates against a URL and returns a {@link AuthenticatedURL.Token} to be
+   * used by subsequent requests.
+   *
+   * @param url the URL to authenticate against.
+   * @param token the authentication token being used for the user.
+   *
+   * @throws IOException if an IO error occurred.
+   * @throws AuthenticationException if an authentication error occurred.
+   */
+  void authenticate(URL url, AuthenticatedURL.Token token) throws IOException, AuthenticationException;
+
+}
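A skeleton implementation, just to make the Authenticator contract concrete; it relies only on the public AuthenticatedURL.extractToken() helper, and everything else (class name, behaviour) is illustrative:

    import org.apache.kerby.has.common.spnego.AuthenticatedURL;
    import org.apache.kerby.has.common.spnego.AuthenticationException;
    import org.apache.kerby.has.common.spnego.Authenticator;
    import org.apache.kerby.has.common.util.ConnectionConfigurator;

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class CookieOnlyAuthenticator implements Authenticator {

        private ConnectionConfigurator connConfigurator;

        @Override
        public void setConnectionConfigurator(ConnectionConfigurator configurator) {
            this.connConfigurator = configurator;
        }

        @Override
        public void authenticate(URL url, AuthenticatedURL.Token token)
                throws IOException, AuthenticationException {
            if (token.isSet()) {
                return; // an established token is simply reused
            }
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            if (connConfigurator != null) {
                conn = connConfigurator.configure(conn);
            }
            conn.connect();
            // Pull the hadoop.auth cookie set by the server (if any) into the token.
            AuthenticatedURL.extractToken(conn, token);
        }
    }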

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/KerberosAuthenticator.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/KerberosAuthenticator.java b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/KerberosAuthenticator.java
new file mode 100644
index 0000000..6c43832
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/KerberosAuthenticator.java
@@ -0,0 +1,359 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.kerby.has.common.spnego;
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.kerby.has.common.util.ConnectionConfigurator;
+import org.ietf.jgss.GSSContext;
+import org.ietf.jgss.GSSManager;
+import org.ietf.jgss.GSSName;
+import org.ietf.jgss.Oid;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosKey;
+import javax.security.auth.kerberos.KerberosTicket;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.security.AccessControlContext;
+import java.security.AccessController;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.apache.kerby.has.common.util.PlatformName.IBM_JAVA;
+
+/**
+ * Borrow the class from Apache Hadoop
+ */
+
+/**
+ * The {@link KerberosAuthenticator} implements the Kerberos SPNEGO authentication sequence.
+ * <p>
+ * It uses the default principal for the Kerberos cache (normally set via kinit).
+ * <p>
+ */
+public class KerberosAuthenticator implements Authenticator {
+  
+  private static final Logger LOG = LoggerFactory.getLogger(KerberosAuthenticator.class);
+
+  /**
+   * HTTP header used by the SPNEGO server endpoint during an authentication sequence.
+   */
+  public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
+
+  /**
+   * HTTP header used by the SPNEGO client endpoint during an authentication sequence.
+   */
+  public static final String AUTHORIZATION = "Authorization";
+
+  /**
+   * HTTP header prefix used by the SPNEGO client/server endpoints during an authentication sequence.
+   */
+  public static final String NEGOTIATE = "Negotiate";
+
+  private static final String AUTH_HTTP_METHOD = "OPTIONS";
+
+  private static String keytabPrincipal = null;
+  private static String keytabFile = null;
+
+  /*
+  * Defines the Kerberos configuration that will be used to obtain the Kerberos principal from the
+  * Kerberos cache.
+  */
+  private static class KerberosConfiguration extends Configuration {
+
+    private static final String OS_LOGIN_MODULE_NAME;
+    private static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");
+    private static final boolean IS_64_BIT = System.getProperty("os.arch").contains("64");
+    private static final boolean AIX = System.getProperty("os.name").equals("AIX");
+
+    /* Return the OS login module class name */
+    private static String getOSLoginModuleName() {
+      if (IBM_JAVA) {
+        if (WINDOWS) {
+          return IS_64_BIT ? "com.ibm.security.auth.module.Win64LoginModule"
+              : "com.ibm.security.auth.module.NTLoginModule";
+        } else if (AIX) {
+          return IS_64_BIT ? "com.ibm.security.auth.module.AIX64LoginModule"
+              : "com.ibm.security.auth.module.AIXLoginModule";
+        } else {
+          return "com.ibm.security.auth.module.LinuxLoginModule";
+        }
+      } else {
+        return WINDOWS ? "com.sun.security.auth.module.NTLoginModule"
+            : "com.sun.security.auth.module.UnixLoginModule";
+      }
+    }
+
+    static {
+      OS_LOGIN_MODULE_NAME = getOSLoginModuleName();
+    }
+
+    private static final AppConfigurationEntry OS_SPECIFIC_LOGIN =
+      new AppConfigurationEntry(OS_LOGIN_MODULE_NAME,
+                                AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                                new HashMap<String, String>());
+
+    private static final Map<String, String> KEYTAB_KERBEROS_OPTIONS
+        = new HashMap<String, String>();
+    static {
+      if (IBM_JAVA) {
+        KEYTAB_KERBEROS_OPTIONS.put("credsType", "both");
+        KEYTAB_KERBEROS_OPTIONS.put("useKeytab",
+            prependFileAuthority(keytabFile));
+      } else {
+        KEYTAB_KERBEROS_OPTIONS.put("doNotPrompt", "true");
+        KEYTAB_KERBEROS_OPTIONS.put("useKeyTab", "true");
+        KEYTAB_KERBEROS_OPTIONS.put("storeKey", "true");
+        KEYTAB_KERBEROS_OPTIONS.put("keyTab", keytabFile);
+      }
+      KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
+      KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
+      KEYTAB_KERBEROS_OPTIONS.put("debug", "false");
+    }
+
+    private static final AppConfigurationEntry USER_KERBEROS_LOGIN =
+      new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
+                                AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL,
+                                KEYTAB_KERBEROS_OPTIONS);
+
+    private static final AppConfigurationEntry[] USER_KERBEROS_CONF =
+      new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, USER_KERBEROS_LOGIN};
+
+    @Override
+    public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
+      return USER_KERBEROS_CONF;
+    }
+
+    private static String prependFileAuthority(String keytabPath) {
+      return keytabPath.startsWith("file://") ? keytabPath
+          : "file://" + keytabPath;
+    }
+  }
+  
+  private URL url;
+  private HttpURLConnection conn;
+  private Base64 base64;
+  private ConnectionConfigurator connConfigurator;
+
+  /**
+   * Sets a {@link ConnectionConfigurator} instance to use for
+   * configuring connections.
+   *
+   * @param configurator the {@link ConnectionConfigurator} instance.
+   */
+  @Override
+  public void setConnectionConfigurator(ConnectionConfigurator configurator) {
+    connConfigurator = configurator;
+  }
+
+  /**
+   * Performs SPNEGO authentication against the specified URL.
+   * <p>
+   * If a token is given it does a NOP and returns the given token.
+   * <p>
+   * If no token is given, it will perform the SPNEGO authentication sequence using an
+   * HTTP <code>OPTIONS</code> request.
+   *
+   * @param url the URL to authenticate against.
+   * @param token the authentication token being used for the user.
+   *
+   * @throws IOException if an IO error occurred.
+   * @throws AuthenticationException if an authentication error occurred.
+   */
+  @Override
+  public void authenticate(URL url, AuthenticatedURL.Token token)
+    throws IOException, AuthenticationException {
+
+    if (!token.isSet()) {
+      this.url = url;
+      base64 = new Base64(0);
+      conn = (HttpURLConnection) url.openConnection();
+      if (connConfigurator != null) {
+        conn = connConfigurator.configure(conn);
+      }
+      conn.setRequestMethod(AUTH_HTTP_METHOD);
+      conn.connect();
+      
+      boolean needFallback = false;
+      if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
+        LOG.debug("JDK performed authentication on our behalf.");
+        // If the JDK already did the SPNEGO back-and-forth for
+        // us, just pull out the token.
+        AuthenticatedURL.extractToken(conn, token);
+        if (isTokenKerberos(token)) {
+          return;
+        }
+        needFallback = true;
+      }
+      if (!needFallback && isNegotiate()) {
+        LOG.debug("Performing our own SPNEGO sequence.");
+        doSpnegoSequence(token);
+      } else {
+        throw new IOException("Should perform our own SPNEGO sequence");
+      }
+    }
+  }
+
+  /**
+   * Sets the keytab and principal used for login; call before {@link #authenticate}.
+   */
+  public void setKeyTab(String keytabFile, String keytabPrincipal) {
+    // These fields are static, so qualify the assignment rather than using 'this'.
+    KerberosAuthenticator.keytabFile = keytabFile;
+    KerberosAuthenticator.keytabPrincipal = keytabPrincipal;
+  }
+
+  /*
+   * Check if the passed token is of type "kerberos" or "kerberos-dt"
+   */
+  private boolean isTokenKerberos(AuthenticatedURL.Token token)
+      throws AuthenticationException {
+    if (token.isSet()) {
+      AuthToken aToken = AuthToken.parse(token.toString());
+      if (aToken.getType().equals("kerberos")
+          || aToken.getType().equals("kerberos-dt")) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /*
+  * Indicates if the response is starting a SPNEGO negotiation.
+  */
+  private boolean isNegotiate() throws IOException {
+    boolean negotiate = false;
+    if (conn.getResponseCode() == HttpURLConnection.HTTP_UNAUTHORIZED) {
+      String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
+      negotiate = authHeader != null && authHeader.trim().startsWith(NEGOTIATE);
+    }
+    return negotiate;
+  }
+
+  /**
+   * Implements the SPNEGO authentication sequence interaction using the current default principal
+   * in the Kerberos cache (normally set via kinit).
+   *
+   * @param token the authentication token being used for the user.
+   *
+   * @throws IOException if an IO error occurred.
+   * @throws AuthenticationException if an authentication error occurred.
+   */
+  private void doSpnegoSequence(AuthenticatedURL.Token token) throws IOException, AuthenticationException {
+    try {
+      AccessControlContext context = AccessController.getContext();
+      Subject subject = Subject.getSubject(context);
+      if (subject == null
+          || (subject.getPrivateCredentials(KerberosKey.class).isEmpty()
+              && subject.getPrivateCredentials(KerberosTicket.class).isEmpty())) {
+        LOG.debug("No subject in context, logging in");
+        subject = new Subject();
+        LoginContext login = new LoginContext("", subject,
+            null, new KerberosConfiguration());
+        login.login();
+      }
+
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Using subject: " + subject);
+      }
+      Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
+
+        @Override
+        public Void run() throws Exception {
+          GSSContext gssContext = null;
+          try {
+            GSSManager gssManager = GSSManager.getInstance();
+            String servicePrincipal = KerberosUtil.getServicePrincipal("HTTP",
+                KerberosAuthenticator.this.url.getHost());
+            LOG.info("service principal is:" + servicePrincipal);
+            Oid oid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
+            GSSName serviceName = gssManager.createName(servicePrincipal,
+                                                        oid);
+            oid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
+            gssContext = gssManager.createContext(serviceName, oid, null,
+                                                  GSSContext.DEFAULT_LIFETIME);
+            gssContext.requestCredDeleg(true);
+            gssContext.requestMutualAuth(true);
+
+            byte[] inToken = new byte[0];
+            byte[] outToken;
+            boolean established = false;
+
+            // Loop while the context is still not established
+            while (!established) {
+              outToken = gssContext.initSecContext(inToken, 0, inToken.length);
+              if (outToken != null) {
+                sendToken(outToken);
+              }
+
+              if (!gssContext.isEstablished()) {
+                inToken = readToken();
+              } else {
+                established = true;
+              }
+            }
+          } finally {
+            if (gssContext != null) {
+              gssContext.dispose();
+              gssContext = null;
+            }
+          }
+          return null;
+        }
+      });
+    } catch (PrivilegedActionException ex) {
+      throw new AuthenticationException(ex.getException());
+    } catch (LoginException ex) {
+      throw new AuthenticationException(ex);
+    }
+    AuthenticatedURL.extractToken(conn, token);
+  }
+
+  /*
+  * Sends the Kerberos token to the server.
+  */
+  private void sendToken(byte[] outToken) throws IOException {
+    String token = base64.encodeToString(outToken);
+    conn = (HttpURLConnection) url.openConnection();
+    if (connConfigurator != null) {
+      conn = connConfigurator.configure(conn);
+    }
+    conn.setRequestMethod(AUTH_HTTP_METHOD);
+    conn.setRequestProperty(AUTHORIZATION, NEGOTIATE + " " + token);
+    conn.connect();
+  }
+
+  /*
+  * Retrieves the Kerberos token returned by the server.
+  */
+  private byte[] readToken() throws IOException, AuthenticationException {
+    int status = conn.getResponseCode();
+    if (status == HttpURLConnection.HTTP_OK || status == HttpURLConnection.HTTP_UNAUTHORIZED) {
+      String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
+      if (authHeader == null || !authHeader.trim().startsWith(NEGOTIATE)) {
+        throw new AuthenticationException("Invalid SPNEGO sequence, '" + WWW_AUTHENTICATE
+            + "' header incorrect: " + authHeader);
+      }
+      String negotiation = authHeader.trim().substring((NEGOTIATE + " ").length()).trim();
+      return base64.decode(negotiation);
+    }
+    throw new AuthenticationException("Invalid SPNEGO sequence, status code: " + status);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/KerberosHasAuthenticator.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/KerberosHasAuthenticator.java b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/KerberosHasAuthenticator.java
new file mode 100644
index 0000000..da598a3
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/KerberosHasAuthenticator.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.common.spnego;
+
+public class KerberosHasAuthenticator extends KerberosAuthenticator {
+
+    public KerberosHasAuthenticator(String keytabFile, String keytabPrincipal) {
+        setKeyTab(keytabFile, keytabPrincipal);
+    }
+}
\ No newline at end of file
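Putting these pieces together, a keytab-based admin client call might look like the sketch below, mirroring the SPNEGO branch of URLConnectionFactory.openConnection(); the keytab path, principal and URL are illustrative and would normally come from HasConfig.getAdminKeytab() and HasConfig.getAdminKeytabPrincipal():

    import org.apache.kerby.has.common.spnego.AuthenticatedURL;
    import org.apache.kerby.has.common.spnego.AuthenticationException;
    import org.apache.kerby.has.common.spnego.KerberosHasAuthenticator;

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class KerberosHasAuthenticatorSketch {
        public static void main(String[] args) throws IOException, AuthenticationException {
            KerberosHasAuthenticator authenticator =
                new KerberosHasAuthenticator("/etc/has/admin.keytab", "kadmin/EXAMPLE.COM@EXAMPLE.COM");

            AuthenticatedURL.Token token = new AuthenticatedURL.Token();
            URL url = new URL("https://has.example.com:8443/has/v1/hadmin");

            // No custom ConnectionConfigurator here; a real client would install one for SSL.
            HttpURLConnection conn = new AuthenticatedURL(authenticator, null).openConnection(url, token);
            System.out.println("status: " + conn.getResponseCode());
        }
    }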

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/KerberosUtil.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/KerberosUtil.java b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/KerberosUtil.java
new file mode 100644
index 0000000..062b6a1
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/kerby/has/common/spnego/KerberosUtil.java
@@ -0,0 +1,262 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.common.spnego;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;
+import java.util.regex.Pattern;
+
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
+import org.ietf.jgss.GSSException;
+import org.ietf.jgss.Oid;
+
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosTicket;
+import javax.security.auth.kerberos.KeyTab;
+
+import static org.apache.kerby.has.common.util.PlatformName.IBM_JAVA;
+
+/**
+ * Borrow the class from Apache Hadoop
+ */
+
+public class KerberosUtil {
+
+  /* Return the Kerberos login module name */
+  public static String getKrb5LoginModuleName() {
+    return (IBM_JAVA)
+      ? "com.ibm.security.auth.module.Krb5LoginModule"
+      : "com.sun.security.auth.module.Krb5LoginModule";
+  }
+
+  public static Oid getOidInstance(String oidName)
+      throws ClassNotFoundException, GSSException, NoSuchFieldException,
+      IllegalAccessException {
+    Class<?> oidClass;
+    if (IBM_JAVA) {
+      if ("NT_GSS_KRB5_PRINCIPAL".equals(oidName)) {
+        // IBM JDK GSSUtil class does not have field for krb5 principal oid
+        return new Oid("1.2.840.113554.1.2.2.1");
+      }
+      oidClass = Class.forName("com.ibm.security.jgss.GSSUtil");
+    } else {
+      oidClass = Class.forName("sun.security.jgss.GSSUtil");
+    }
+    Field oidField = oidClass.getDeclaredField(oidName);
+    return (Oid) oidField.get(oidClass);
+  }
+
+  public static String getDefaultRealm() 
+      throws ClassNotFoundException, NoSuchMethodException, 
+      IllegalArgumentException, IllegalAccessException, 
+      InvocationTargetException {
+    Object kerbConf;
+    Class<?> classRef;
+    Method getInstanceMethod;
+    Method getDefaultRealmMethod;
+    if (IBM_JAVA) {
+      classRef = Class.forName("com.ibm.security.krb5.internal.Config");
+    } else {
+      classRef = Class.forName("sun.security.krb5.Config");
+    }
+    getInstanceMethod = classRef.getMethod("getInstance", new Class[0]);
+    kerbConf = getInstanceMethod.invoke(classRef, new Object[0]);
+    getDefaultRealmMethod = classRef.getDeclaredMethod("getDefaultRealm",
+        new Class[0]);
+    return (String) getDefaultRealmMethod.invoke(kerbConf, new Object[0]);
+  }
+
+  public static String getDefaultRealmProtected() {
+    String realmString = null;
+    try {
+      realmString = getDefaultRealm();
+    } catch (RuntimeException rte) {
+      //silently catch everything
+    } catch (Exception e) {
+      //silently return null
+    }
+    return realmString;
+  }
+
+  /*
+   * For a Service Host Principal specification, map the host's domain
+   * to kerberos realm, as specified by krb5.conf [domain_realm] mappings.
+   * Unfortunately the mapping routines are private to the security.krb5
+   * package, so have to construct a PrincipalName instance to derive the realm.
+   *
+   * Many things can go wrong with Kerberos configuration, and this is not
+   * the place to be throwing exceptions to help debug them.  Nor do we choose
+   * to make potentially voluminous logs on every call to a communications API.
+   * So we simply swallow all exceptions from the underlying libraries and
+   * return null if we can't get a good value for the realmString.
+   *
+   * @param shortprinc A service principal name with host fqdn as instance, e.g.
+   *     "HTTP/myhost.mydomain"
+   * @return String value of Kerberos realm, mapped from host fqdn
+   *     May be default realm, or may be null.
+   */
+  public static String getDomainRealm(String shortprinc) {
+    Class<?> classRef;
+    Object principalName; //of type sun.security.krb5.PrincipalName or IBM equiv
+    String realmString = null;
+    try {
+      if (IBM_JAVA) {
+        classRef = Class.forName("com.ibm.security.krb5.PrincipalName");
+      } else {
+        classRef = Class.forName("sun.security.krb5.PrincipalName");
+      }
+      int tKrbNtSrvHst = classRef.getField("KRB_NT_SRV_HST").getInt(null);
+      principalName = classRef.getConstructor(String.class, int.class).
+          newInstance(shortprinc, tKrbNtSrvHst);
+      realmString = (String) classRef.getMethod("getRealmString", new Class[0]).
+          invoke(principalName, new Object[0]);
+    } catch (RuntimeException rte) {
+      //silently catch everything
+    } catch (Exception e) {
+      //silently return default realm (which may itself be null)
+    }
+    if (null == realmString || realmString.equals("")) {
+      return getDefaultRealmProtected();
+    } else {
+      return realmString;
+    }
+  }
+
+  /* Return fqdn of the current host */
+  static String getLocalHostName() throws UnknownHostException {
+    return InetAddress.getLocalHost().getCanonicalHostName();
+  }
+  
+  /**
+   * Create a Kerberos principal for a given service and hostname,
+   * inferring the realm from the fqdn of the hostname. It converts
+   * the hostname to lower case. If the hostname is null or "0.0.0.0", it uses
+   * the dynamically looked-up fqdn of the current host instead.
+   * If the domain_realm mappings are inadequately specified, it will
+   * use default_realm, per usual Kerberos behavior.
+   * If default_realm also gives a null value, then a principal
+   * without a realm will be returned, which by Kerberos definitions is
+   * just another way to specify the default realm.
+   *
+   * @param service
+   *          Service for which you want to generate the principal.
+   * @param hostname
+   *          Fully-qualified domain name.
+   * @return Converted Kerberos principal name.
+   * @throws UnknownHostException
+   *           If no IP address for the local host could be found.
+   */
+  public static final String getServicePrincipal(String service,
+      String hostname)
+      throws UnknownHostException {
+    String fqdn = hostname;
+    String shortprinc = null;
+    String realmString = null;
+    if (null == fqdn || fqdn.equals("") || fqdn.equals("0.0.0.0")) {
+      fqdn = getLocalHostName();
+    }
+    // Convert the hostname to lowercase, as Kerberos does not work with
+    // hostnames containing uppercase characters.
+    fqdn = fqdn.toLowerCase(Locale.US);
+    shortprinc = service + "/" + fqdn;
+    // Obtain the realm name inferred from the domain of the host
+    realmString = getDomainRealm(shortprinc);
+    if (null == realmString || realmString.equals("")) {
+      return shortprinc;
+    } else {
+      return shortprinc + "@" + realmString;
+    }
+  }
+
+  /**
+   * Get all the unique principals present in the keytab file.
+   *
+   * @param keytabFileName
+   *          Name of the keytab file to be read.
+   * @return list of unique principals in the keytab.
+   * @throws IOException
+   *          If keytab entries cannot be read from the file.
+   */
+  static final String[] getPrincipalNames(String keytabFileName) throws IOException {
+    Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
+    Set<String> principals = new HashSet<String>();
+    List<PrincipalName> entries = keytab.getPrincipals();
+    for (PrincipalName entry : entries) {
+      principals.add(entry.getName().replace("\\", "/"));
+    }
+    return principals.toArray(new String[0]);
+  }
+
+  /**
+   * Get all the unique principals from the keytab file which match a pattern.
+   *
+   * @param keytab Name of the keytab file to be read.
+   * @param pattern pattern to be matched.
+   * @return list of unique principals which match the pattern.
+   * @throws IOException if the principal names cannot be read.
+   */
+  public static final String[] getPrincipalNames(String keytab,
+      Pattern pattern) throws IOException {
+    String[] principals = getPrincipalNames(keytab);
+    if (principals.length != 0) {
+      List<String> matchingPrincipals = new ArrayList<String>();
+      for (String principal : principals) {
+        if (pattern.matcher(principal).matches()) {
+          matchingPrincipals.add(principal);
+        }
+      }
+      principals = matchingPrincipals.toArray(new String[0]);
+    }
+    return principals;
+  }
+
+  /**
+   * Check if the subject contains Kerberos keytab related objects.
+   * The Kerberos keytab object attached to the subject changed
+   * from KerberosKey (JDK 7) to KeyTab (JDK 8).
+   *
+   * @param subject subject to be checked
+   * @return true if the subject contains a Kerberos keytab
+   */
+  public static boolean hasKerberosKeyTab(Subject subject) {
+    return !subject.getPrivateCredentials(KeyTab.class).isEmpty();
+  }
+
+  /**
+   * Check if the subject contains a Kerberos ticket.
+   *
+   * @param subject subject to be checked
+   * @return true if the subject contains a Kerberos ticket
+   */
+  public static boolean hasKerberosTicket(Subject subject) {
+    return !subject.getPrivateCredentials(KerberosTicket.class).isEmpty();
+  }
+}
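
For readers skimming this diff, a minimal usage sketch of the utility above may help. It is an illustration only: the package name is assumed from the PlatformName import in the same file, and the keytab path is made up.

    import java.io.IOException;
    import java.util.regex.Pattern;

    import org.apache.kerby.has.common.util.KerberosUtil; // assumed package, per the import block above

    public class KerberosUtilExample {
        public static void main(String[] args) throws IOException {
            // Build "HTTP/<local-fqdn>@<realm>", with the realm inferred from krb5.conf.
            String principal = KerberosUtil.getServicePrincipal("HTTP", null);
            System.out.println("Service principal: " + principal);

            // List the HTTP/* principals stored in a keytab (path is illustrative only).
            String[] matches = KerberosUtil.getPrincipalNames("/etc/has/http.keytab",
                    Pattern.compile("HTTP/.*"));
            for (String p : matches) {
                System.out.println("Keytab principal: " + p);
            }
        }
    }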


[09/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-server/pom.xml b/has/has-server/pom.xml
index 30a4aa8..4771741 100644
--- a/has/has-server/pom.xml
+++ b/has/has-server/pom.xml
@@ -5,7 +5,7 @@
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
-    <groupId>org.apache.hadoop</groupId>
+    <groupId>org.apache.kerby</groupId>
     <artifactId>has-project</artifactId>
     <version>1.0.0-SNAPSHOT</version>
   </parent>
@@ -90,7 +90,7 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>3.0.0-alpha2</version>
+      <version>${hadoop.version}</version>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
@@ -109,7 +109,7 @@
       <version>RELEASE</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.kerby</groupId>
       <artifactId>has-common</artifactId>
       <version>${project.version}</version>
     </dependency>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java
deleted file mode 100644
index b4cd5d6..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server;
-
-import org.apache.kerby.kerberos.kerb.KrbRuntime;
-import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public abstract class AbstractHasServerPlugin implements HasServerPlugin {
-
-    public static final Logger LOG = LoggerFactory.getLogger(AbstractHasServerPlugin.class);
-
-    protected abstract void doAuthenticate(AuthToken userToken, AuthToken authToken)
-        throws HasAuthenException;
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public AuthToken authenticate(AuthToken userToken) throws HasAuthenException {
-
-        AuthToken authToken = KrbRuntime.getTokenProvider("JWT").createTokenFactory().createToken();
-
-        doAuthenticate(userToken, authToken);
-
-        return authToken;
-    }
-
-}
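
The base class above follows a template-method pattern: authenticate() obtains a JWT-backed AuthToken and delegates the actual check to doAuthenticate(). A hedged sketch of a concrete plugin follows; only members visible in this diff are used, and the "DUMMY" login type and class name are illustrative, not part of the commit.

    import org.apache.kerby.kerberos.kerb.type.base.AuthToken;

    public class DummyHasServerPlugin extends AbstractHasServerPlugin {

        @Override
        public String getLoginType() {
            return "DUMMY"; // must match the login type reported by the client-side plugin
        }

        @Override
        protected void doAuthenticate(AuthToken userToken, AuthToken authToken)
                throws HasAuthenException {
            if (userToken == null) {
                throw new HasAuthenException("No user token supplied");
            }
            // A real plugin would verify userToken here and copy the verified
            // attributes into authToken before the KDC exchanges it for a ticket.
        }
    }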

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java
deleted file mode 100644
index 14df580..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server;
-
-import org.apache.hadoop.has.common.HasException;
-
-public class HasAuthenException extends HasException {
-    private static final long serialVersionUID = 171016915395892939L;
-
-    public HasAuthenException(Throwable cause) {
-        super(cause);
-    }
-
-    public HasAuthenException(String message) {
-        super(message);
-    }
-
-    public HasAuthenException(String message, Throwable cause) {
-        super(message, cause);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java
deleted file mode 100644
index cb22b8e..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java
+++ /dev/null
@@ -1,701 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server;
-
-import org.apache.commons.dbutils.DbUtils;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.web.WebConfigKey;
-import org.apache.hadoop.has.server.web.WebServer;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.kerby.kerberos.kdc.impl.NettyKdcServerImpl;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadmin;
-import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadminImpl;
-import org.apache.kerby.kerberos.kerb.client.ClientUtil;
-import org.apache.kerby.kerberos.kerb.client.KrbConfig;
-import org.apache.kerby.kerberos.kerb.client.KrbSetting;
-import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
-import org.apache.kerby.kerberos.kerb.identity.backend.IdentityBackend;
-import org.apache.kerby.kerberos.kerb.server.KdcServer;
-import org.apache.kerby.kerberos.kerb.server.KdcUtil;
-import org.apache.kerby.util.IOUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Pattern;
-
-/**
- * The HAS KDC server implementation.
- */
-public class HasServer {
-    public static final Logger LOG = LoggerFactory.getLogger(HasServer.class);
-
-    private static HasServer server = null;
-
-    private KrbSetting krbSetting;
-    private KdcServer kdcServer;
-    private WebServer webServer;
-    private File confDir;
-    private File workDir;
-    private String kdcHost;
-    private HasConfig hasConfig;
-
-    public HasServer(File confDir) throws KrbException {
-        this.confDir = confDir;
-    }
-
-    private void setConfDir(File confDir) {
-        this.confDir = confDir;
-    }
-
-    public File getConfDir() {
-        return confDir;
-    }
-
-    public File getWorkDir() {
-        return workDir;
-    }
-
-    public void setWorkDir(File workDir) {
-        this.workDir = workDir;
-    }
-
-    public void setKdcHost(String host) {
-        this.kdcHost = host;
-    }
-
-    public String getKdcHost() {
-        return kdcHost;
-    }
-
-    public KrbSetting getKrbSetting() {
-        return krbSetting;
-    }
-
-    public KdcServer getKdcServer() {
-        return kdcServer;
-    }
-
-    public WebServer getWebServer() {
-        return webServer;
-    }
-
-    public void setWebServer(WebServer webServer) {
-        this.webServer = webServer;
-    }
-
-    public void startKdcServer() throws HasException {
-        BackendConfig backendConfig;
-        try {
-            backendConfig = KdcUtil.getBackendConfig(getConfDir());
-        } catch (KrbException e) {
-            throw new HasException("Failed to get backend config. " + e);
-        }
-        String backendJar = backendConfig.getString("kdc_identity_backend");
-        if (backendJar.equals("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend")) {
-            updateKdcConf();
-        }
-        try {
-            kdcServer = new KdcServer(confDir);
-        } catch (KrbException e) {
-            throw new HasException("Failed to create KdcServer. " + e);
-        }
-        kdcServer.setWorkDir(workDir);
-        kdcServer.setInnerKdcImpl(new NettyKdcServerImpl(kdcServer.getKdcSetting()));
-        try {
-            kdcServer.init();
-        } catch (KrbException e) {
-            LOG.error("Errors occurred when init has kdc server:  " + e.getMessage());
-            throw new HasException("Errors occurred when init has kdc server:  " + e.getMessage());
-        }
-
-        KrbConfig krbConfig = null;
-        try {
-            krbConfig = ClientUtil.getConfig(confDir);
-        } catch (KrbException e) {
-            new HasException("Errors occurred when getting the config from conf dir. "
-                + e.getMessage());
-        }
-        if (krbConfig == null) {
-            krbConfig = new KrbConfig();
-        }
-        this.krbSetting = new KrbSetting(krbConfig);
-        try {
-            kdcServer.start();
-        } catch (KrbException e) {
-            throw new HasException("Failed to start kdc server. " + e);
-        }
-        try {
-            HasUtil.setEnableConf(new File(confDir, "has-server.conf"), "false");
-        } catch (Exception e) {
-            throw new HasException("Failed to enable conf. " + e);
-        }
-        setHttpFilter();
-    }
-
-    private void setHttpFilter() throws HasException {
-        File httpKeytabFile = new File(workDir, "http.keytab");
-        LocalKadmin kadmin = new LocalKadminImpl(kdcServer.getKdcSetting(),
-            kdcServer.getIdentityService());
-        createHttpPrincipal(kadmin);
-        try {
-            kadmin.exportKeytab(httpKeytabFile, getHttpPrincipal());
-        } catch (KrbException e) {
-            throw new HasException("Failed to export keytab: " + e.getMessage());
-        }
-        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE,
-            hasConfig.getFilterAuthType());
-        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
-            getHttpPrincipal());
-        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
-            httpKeytabFile.getPath());
-        webServer.defineFilter();
-    }
-
-    public File initKdcServer() throws KrbException {
-        File adminKeytabFile = new File(workDir, "admin.keytab");
-        LocalKadmin kadmin = new LocalKadminImpl(kdcServer.getKdcSetting(),
-            kdcServer.getIdentityService());
-        if (adminKeytabFile.exists()) {
-            throw new KrbException("KDC Server is already inited.");
-        }
-        kadmin.createBuiltinPrincipals();
-        kadmin.exportKeytab(adminKeytabFile, kadmin.getKadminPrincipal());
-        System.out.println("The keytab for kadmin principal "
-            + " has been exported to the specified file "
-            + adminKeytabFile.getAbsolutePath() + ", please safely keep it, "
-            + "in order to use kadmin tool later");
-
-        return adminKeytabFile;
-    }
-
-    public void createHttpPrincipal(LocalKadmin kadmin) throws HasException {
-        String httpPrincipal = getHttpPrincipal();
-        IdentityBackend backend = kdcServer.getIdentityService();
-        try {
-            if (backend.getIdentity(httpPrincipal) == null) {
-                kadmin.addPrincipal(httpPrincipal);
-            } else {
-                LOG.info("The http principal already exists in backend.");
-            }
-        } catch (KrbException e) {
-            throw new HasException("Failed to add princial, " + e.getMessage());
-        }
-    }
-
-    public String getHttpPrincipal() throws HasException {
-        String realm = kdcServer.getKdcSetting().getKdcRealm();
-        String nameString;
-        try {
-            InetAddress addr = InetAddress.getLocalHost();
-            String fqName = addr.getCanonicalHostName();
-            nameString = "HTTP/" + fqName + "@" + realm;
-        } catch (UnknownHostException e) {
-            throw new HasException(e);
-        }
-        LOG.info("The http principal name is: " + nameString);
-        return nameString;
-    }
-
-    /**
-     * Update conf file.
-     *
-     * @param confName  conf file name
-     * @param values    customized values
-     * @throws IOException throw IOException
-     * @throws KrbException e
-     */
-    public void updateConfFile(String confName, Map<String, String> values)
-        throws IOException, HasException {
-        File confFile = new File(getConfDir().getAbsolutePath(), confName);
-        if (confFile.exists()) {
-            // Update conf file content
-            InputStream templateResource;
-            if (confName.equals("has-server.conf")) {
-                templateResource = new FileInputStream(confFile);
-            } else {
-                String resourcePath = "/" + confName + ".template";
-                templateResource = getClass().getResourceAsStream(resourcePath);
-            }
-            String content = IOUtil.readInput(templateResource);
-            for (Map.Entry<String, String> entry : values.entrySet()) {
-                content = content.replaceAll(Pattern.quote(entry.getKey()), entry.getValue());
-            }
-
-            // Delete the original conf file
-            boolean delete = confFile.delete();
-            if (!delete) {
-                throw new HasException("Failed to delete conf file: " + confName);
-            }
-
-            // Save the updated conf file
-            IOUtil.writeFile(content, confFile);
-        } else {
-            throw new HasException("Conf file: " + confName + " not found.");
-        }
-    }
-
-    /**
-     * Get KDC Config from MySQL.
-     *
-     * @return Kdc config
-     * @throws KrbException e
-     */
-    private Map<String, String> getKdcConf() throws HasException {
-        PreparedStatement preStm = null;
-        ResultSet result = null;
-        Map<String, String> kdcConf = new HashMap<>();
-        BackendConfig backendConfig;
-        try {
-            backendConfig = KdcUtil.getBackendConfig(getConfDir());
-        } catch (KrbException e) {
-            throw new HasException("Getting backend config failed." + e.getMessage());
-        }
-        String driver = backendConfig.getString("mysql_driver");
-        String url = backendConfig.getString("mysql_url");
-        String user = backendConfig.getString("mysql_user");
-        String password = backendConfig.getString("mysql_password");
-        Connection connection = startConnection(driver, url, user, password);
-        try {
-
-            // Get Kdc configuration from kdc_config table
-            String stmKdc = "SELECT * FROM `kdc_config` WHERE id = 1";
-            preStm = connection.prepareStatement(stmKdc);
-            result = preStm.executeQuery();
-            while (result.next()) {
-                String realm = result.getString("realm");
-                String servers = result.getString("servers");
-                String port = String.valueOf(result.getInt("port"));
-                kdcConf.put("servers", servers);
-                kdcConf.put("_PORT_", port);
-                kdcConf.put("_REALM_", realm);
-            }
-
-        } catch (SQLException e) {
-            LOG.error("Error occurred while getting kdc config.");
-            throw new HasException("Failed to get kdc config. ", e);
-        } finally {
-            DbUtils.closeQuietly(preStm);
-            DbUtils.closeQuietly(result);
-            DbUtils.closeQuietly(connection);
-        }
-
-        return kdcConf;
-    }
-
-    /**
-     * Update KDC conf file.
-     *
-     * @throws KrbException e
-     */
-    private void updateKdcConf() throws HasException {
-        try {
-            Map<String, String> values = getKdcConf();
-            String host = getKdcHost();
-            if (host == null) {
-                host = getWebServer().getBindAddress().getHostName();
-            }
-            values.remove("servers");
-            values.put("_HOST_", host);
-            updateConfFile("kdc.conf", values);
-        } catch (IOException e) {
-            throw new HasException("Failed to update kdc config. ", e);
-        }
-    }
-
-    /**
-     * Start the MySQL connection.
-     *
-     * @param url url of connection
-     * @param user username of connection
-     * @param password password of connection
-     * @throws KrbException e
-     * @return MySQL JDBC connection
-     */
-    private Connection startConnection(String driver, String url, String user,
-                                       String password) throws HasException {
-        Connection connection;
-        try {
-            Class.forName(driver);
-            connection = DriverManager.getConnection(url, user, password);
-            if (!connection.isClosed()) {
-                LOG.info("Succeeded in connecting to MySQL.");
-            }
-        } catch (ClassNotFoundException e) {
-            throw new HasException("JDBC Driver Class not found. ", e);
-        } catch (SQLException e) {
-            throw new HasException("Failed to connecting to MySQL. ", e);
-        }
-
-        return connection;
-    }
-
-    /**
-     * Config HAS server KDC which have MySQL backend.
-     * @param backendConfig MySQL backend config
-     * @param realm KDC realm to set
-     * @param host KDC host to set
-     * @param hasServer has server to get param
-     * @throws HasException e
-     */
-    public void configMySQLKdc(BackendConfig backendConfig, String realm, int port,
-                               String host, HasServer hasServer) throws HasException {
-
-        // Start mysql connection
-        String driver = backendConfig.getString("mysql_driver");
-        String url = backendConfig.getString("mysql_url");
-        String user = backendConfig.getString("mysql_user");
-        String password = backendConfig.getString("mysql_password");
-        Connection connection = startConnection(driver, url, user, password);
-
-        ResultSet resConfig = null;
-        PreparedStatement preStm = null;
-        try {
-            createKdcTable(connection); // Create kdc_config table if not exists
-            String stm = "SELECT * FROM `kdc_config` WHERE id = 1";
-            preStm = connection.prepareStatement(stm);
-            resConfig = preStm.executeQuery();
-            if (!resConfig.next()) {
-                addKdcConfig(connection, realm, port, host);
-            } else {
-                String oldHost = hasServer.getKdcHost();
-                String servers = resConfig.getString("servers");
-                String[] serverArray = servers.split(",");
-                List<String> serverList = new ArrayList<>();
-                Collections.addAll(serverList, serverArray);
-                if (serverList.contains(oldHost)) {
-                    servers = servers.replaceAll(oldHost, host);
-                } else {
-                    servers = servers + "," + host;
-                }
-                boolean initialized = resConfig.getBoolean("initialized");
-                updateKdcConfig(connection, initialized, port, realm, servers);
-            }
-            hasServer.setKdcHost(host);
-        } catch (SQLException e) {
-            throw new HasException("Failed to config HAS KDC. ", e);
-        } finally {
-            DbUtils.closeQuietly(preStm);
-            DbUtils.closeQuietly(resConfig);
-            DbUtils.closeQuietly(connection);
-        }
-    }
-
-    /**
-     * Create kdc_config table in database.
-     * @param conn database connection
-     * @throws KrbException e
-     */
-    private void createKdcTable(final Connection conn) throws HasException {
-        PreparedStatement preStm = null;
-        try {
-            String stm = "CREATE TABLE IF NOT EXISTS `kdc_config` ("
-                + "port INTEGER DEFAULT 88, servers VARCHAR(255) NOT NULL, "
-                + "initialized bool DEFAULT FALSE, realm VARCHAR(255) "
-                + "DEFAULT NULL, id INTEGER DEFAULT 1, CHECK (id=1), PRIMARY KEY (id)) "
-                + "ENGINE=INNODB;";
-            preStm = conn.prepareStatement(stm);
-            preStm.executeUpdate();
-        } catch (SQLException e) {
-            throw new HasException("Failed to create kdc_config table. ", e);
-        } finally {
-            DbUtils.closeQuietly(preStm);
-        }
-    }
-
-    /**
-     * Add KDC Config information in database.
-     * @param conn database connection
-     * @param realm realm to add
-     * @param port port to add
-     * @param host host to add
-     */
-    private void addKdcConfig(Connection conn, String realm, int port, String host)
-        throws HasException {
-        PreparedStatement preStm = null;
-        try {
-            String stm = "INSERT INTO `kdc_config` (port, servers, realm)" + " VALUES(?, ?, ?)";
-            preStm = conn.prepareStatement(stm);
-            preStm.setInt(1, port);
-            preStm.setString(2, host);
-            preStm.setString(3, realm);
-            preStm.executeUpdate();
-        } catch (SQLException e) {
-            throw new HasException("Failed to insert into kdc_config table. ", e);
-        } finally {
-            DbUtils.closeQuietly(preStm);
-        }
-    }
-
-    /**
-     * Update KDC Config record in database.
-     * @param conn database connection
-     * @param realm realm to update
-     * @param port port to update
-     * @param servers servers to update
-     * @param initialized initial state of KDC Config
-     */
-    private void updateKdcConfig(Connection conn, boolean initialized, int port,
-                                 String realm, String servers) throws HasException {
-        PreparedStatement preStm = null;
-        try {
-            if (initialized) {
-                String stmUpdate = "UPDATE `kdc_config` SET servers = ? WHERE id = 1";
-                preStm = conn.prepareStatement(stmUpdate);
-                preStm.setString(1, servers);
-                preStm.executeUpdate();
-            } else {
-                String stmUpdate = "UPDATE `kdc_config` SET port = ?, realm = ?, servers = ? WHERE id = 1";
-                preStm = conn.prepareStatement(stmUpdate);
-                preStm.setInt(1, port);
-                preStm.setString(2, realm);
-                preStm.setString(3, servers);
-                preStm.executeUpdate();
-            }
-        } catch (SQLException e) {
-            throw new HasException("Failed to update KDC Config. ", e);
-        } finally {
-            DbUtils.closeQuietly(preStm);
-        }
-    }
-
-    /**
-     * Read in krb5-template.conf and substitute in the correct port.
-     *
-     * @return krb5 conf file
-     * @throws IOException e
-     * @throws KrbException e
-     */
-    public File generateKrb5Conf() throws HasException {
-        Map<String, String> kdcConf = getKdcConf();
-        String[] servers = kdcConf.get("servers").split(",");
-        int kdcPort = Integer.parseInt(kdcConf.get("_PORT_"));
-        String kdcRealm = kdcConf.get("_REALM_");
-        StringBuilder kdcBuilder = new StringBuilder();
-        for (String server : servers) {
-            String append = "\t\tkdc = " + server.trim() + ":" + kdcPort + "\n";
-            kdcBuilder.append(append);
-        }
-        String kdc = kdcBuilder.toString();
-        kdc = kdc.substring(0, kdc.length() - 1);
-        String resourcePath = "/krb5.conf.template";
-        InputStream templateResource = getClass().getResourceAsStream(resourcePath);
-        String content = null;
-        try {
-            content = IOUtil.readInput(templateResource);
-        } catch (IOException e) {
-            throw new HasException("Read template resource failed. " + e);
-        }
-        content = content.replaceAll("_REALM_", kdcRealm);
-        content = content.replaceAll("_PORT_", String.valueOf(kdcPort));
-        content = content.replaceAll("_UDP_LIMIT_", "4096");
-        content = content.replaceAll("_KDCS_", kdc);
-        File confFile = new File(confDir, "krb5.conf");
-        if (confFile.exists()) {
-            boolean delete = confFile.delete();
-            if (!delete) {
-                throw new HasException("File delete error!");
-            }
-        }
-        try {
-            IOUtil.writeFile(content, confFile);
-        } catch (IOException e) {
-            throw new HasException("Write content to conf file failed. " + e);
-        }
-
-        return confFile;
-    }
-
-    /**
-     * Read in has-server.conf and create has-client.conf.
-     *
-     * @return has conf file
-     * @throws IOException e
-     * @throws HasException e
-     */
-    public File generateHasConf() throws HasException, IOException {
-        Map<String, String> kdcConf = getKdcConf();
-        String servers = kdcConf.get("servers");
-        File confFile = new File(getConfDir().getAbsolutePath(), "has-server.conf");
-        HasConfig hasConfig = HasUtil.getHasConfig(confFile);
-        if (hasConfig != null) {
-            String defaultValue = hasConfig.getHttpsHost();
-            InputStream templateResource = new FileInputStream(confFile);
-            String content = IOUtil.readInput(templateResource);
-            content = content.replaceFirst(Pattern.quote(defaultValue), servers);
-            File hasFile = new File(confDir, "has-client.conf");
-            IOUtil.writeFile(content, hasFile);
-            return hasFile;
-        } else {
-            throw new HasException("has-server.conf not found. ");
-        }
-    }
-
-    public void stopKdcServer() {
-        try {
-            kdcServer.stop();
-        } catch (KrbException e) {
-            LOG.error("Fail to stop has kdc server");
-        }
-    }
-
-    public void startWebServer() throws HasException {
-        if (webServer == null) {
-            HasConfig conf = new HasConfig();
-
-            // Parse has-server.conf to get http_host and http_port
-            File confFile = new File(confDir, "has-server.conf");
-            hasConfig = HasUtil.getHasConfig(confFile);
-            if (hasConfig != null) {
-                try {
-                    String httpHost;
-                    String httpPort;
-                    String httpsHost;
-                    String httpsPort;
-                    if (hasConfig.getHttpHost() != null) {
-                        httpHost = hasConfig.getHttpHost();
-                    } else {
-                        LOG.info("Cannot get the http_host from has-server.conf, using the default http host.");
-                        httpHost = WebConfigKey.HAS_HTTP_HOST_DEFAULT;
-                    }
-                    if (hasConfig.getHttpPort() != null) {
-                        httpPort = hasConfig.getHttpPort();
-                    } else {
-                        LOG.info("Cannot get the http_port from has-server.conf, using the default http port.");
-                        httpPort = String.valueOf(WebConfigKey.HAS_HTTP_PORT_DEFAULT);
-                    }
-                    if (hasConfig.getHttpsHost() != null) {
-                        httpsHost = hasConfig.getHttpsHost();
-                    } else {
-                        LOG.info("Cannot get the https_host from has-server.conf, using the default https host.");
-                        httpsHost = WebConfigKey.HAS_HTTPS_HOST_DEFAULT;
-                    }
-                    if (hasConfig.getHttpsPort() != null) {
-                        httpsPort = hasConfig.getHttpsPort();
-                    } else {
-                        LOG.info("Cannot get the https_port from has-server.conf , using the default https port.");
-                        httpsPort = String.valueOf(WebConfigKey.HAS_HTTPS_PORT_DEFAULT);
-                    }
-                    String hasHttpAddress = httpHost + ":" + httpPort;
-                    String hasHttpsAddress = httpsHost + ":" + httpsPort;
-                    LOG.info("The web server http address: " + hasHttpAddress);
-                    LOG.info("The web server https address: " + hasHttpsAddress);
-
-                    conf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY, hasHttpAddress);
-                    conf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY, hasHttpsAddress);
-                    conf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY,
-                        HttpConfig.Policy.HTTP_AND_HTTPS.name());
-                    conf.setString(WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-                        hasConfig.getSslServerConf());
-                    webServer = new WebServer(conf);
-                } catch (NumberFormatException e) {
-                    throw new IllegalArgumentException("https_port should be a number. "
-                        + e.getMessage());
-                }
-            } else {
-                throw new HasException("has-server.conf not found in " + confDir + ". ");
-            }
-        } else {
-            hasConfig = webServer.getConf();
-        }
-        webServer.start();
-        webServer.defineConfFilter();
-        try {
-            HasUtil.setEnableConf(new File(confDir, "has-server.conf"), "true");
-        } catch (IOException e) {
-            throw new HasException("Errors occurred when enable conf. " + e.getMessage());
-        }
-        webServer.setWebServerAttribute(this);
-    }
-
-    public void stopWebServer() {
-        if (webServer != null) {
-            try {
-                webServer.stop();
-            } catch (Exception e) {
-                LOG.error("Failed to stop http server. " + e.getMessage());
-            }
-        }
-    }
-
-    public static void main(String[] args) {
-        if (args[0].equals("-start")) {
-            String confDirPath = args[1];
-            String workDirPath = args[2];
-            File confDir = new File(confDirPath);
-            File workDir = new File(workDirPath);
-            if (!confDir.exists() || !workDir.exists()) {
-                LOG.error("Invalid or not exist conf-dir or work-dir");
-                System.exit(3);
-            }
-            try {
-                server = new HasServer(confDir);
-            } catch (KrbException e) {
-                LOG.error("Errors occurred when create kdc server:  " + e.getMessage());
-                System.exit(4);
-            }
-            server.setConfDir(confDir);
-            server.setWorkDir(workDir);
-            //Only start the webserver, the kdcserver can start after setting the realm
-            try {
-                server.startWebServer();
-            } catch (HasException e) {
-                LOG.error("Errors occurred when start has http server:  " + e.getMessage());
-                System.exit(6);
-            }
-
-            if (server.getWebServer().getHttpAddress() != null) {
-                LOG.info("HAS http server started.");
-                LOG.info("host: " + server.getWebServer().getHttpAddress().getHostName());
-                LOG.info("port: " + server.getWebServer().getHttpAddress().getPort());
-            }
-            if (server.getWebServer().getHttpsAddress() != null) {
-                LOG.info("HAS https server started.");
-                LOG.info("host: " + server.getWebServer().getHttpsAddress().getHostName());
-                LOG.info("port: " + server.getWebServer().getHttpsAddress().getPort());
-            }
-        } else if (args[0].equals("-stop")) {
-            if (server != null) {
-                server.stopWebServer();
-                server.stopKdcServer();
-            }
-        } else {
-            System.exit(2);
-        }
-    }
-}
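
The main() method above starts only the web server; the KDC is started later, once a realm has been configured. A hedged programmatic equivalent of the "-start" path, with illustrative directory locations:

    import java.io.File;

    public class StartHasServerExample {
        public static void main(String[] args) throws Exception {
            File confDir = new File("/etc/has/conf");  // illustrative; must contain has-server.conf
            File workDir = new File("/var/lib/has");   // illustrative work directory
            HasServer server = new HasServer(confDir);
            server.setWorkDir(workDir);
            // Mirrors "-start": bring up the web server first; startKdcServer()
            // is invoked separately after the realm has been configured.
            server.startWebServer();
        }
    }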

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java
deleted file mode 100644
index 6650308..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server;
-
-import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
-
-public interface HasServerPlugin {
-        /**
-         * Get the login module type ID, used to distinguish this module from others.
-         * Should correspond to the client side module.
-         *
-         * @return login type
-         */
-        String getLoginType();
-
-        /**
-         * Perform all the server side authentication logics, the results wrapped in an AuthToken,
-         * will be used to exchange a Kerberos ticket.
-         *
-         * @param userToken user token
-         * @return auth token
-         */
-        AuthToken authenticate(AuthToken userToken) throws HasAuthenException;
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java
deleted file mode 100644
index 621b321..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server;
-
-import org.apache.hadoop.has.common.HasException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Collections;
-import java.util.Map;
-import java.util.ServiceLoader;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-
-public class HasServerPluginRegistry {
-    static final Logger LOG = LoggerFactory.getLogger(HasServerPluginRegistry.class);
-
-    private static Map<String, Class> allPlugins = new ConcurrentHashMap<>();
-
-    static {
-        ServiceLoader<HasServerPlugin> plugins = ServiceLoader.load(HasServerPlugin.class);
-
-        for (HasServerPlugin plugin : plugins) {
-            allPlugins.put(plugin.getLoginType(), plugin.getClass());
-        }
-    }
-
-    public static Set<String> registeredPlugins() {
-        return Collections.unmodifiableSet(allPlugins.keySet());
-    }
-
-    public static boolean registeredPlugin(String name) {
-        return allPlugins.containsKey(name);
-    }
-
-    public static HasServerPlugin createPlugin(String name) throws HasException {
-        if (!registeredPlugin(name)) {
-            throw new HasException("Unregistered plugin " + name);
-        }
-        try {
-            HasServerPlugin serverPlugin = (HasServerPlugin) allPlugins.get(name).newInstance();
-            return serverPlugin;
-        } catch (Exception e) {
-            LOG.error("Create {} plugin failed", name, e);
-            throw new HasException(e.getMessage());
-        }
-    }
-}
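
The registry above discovers plugins through java.util.ServiceLoader, so an implementation only needs a provider-configuration file under META-INF/services naming its class. A hedged lookup sketch, reusing the illustrative "DUMMY" login type from the earlier plugin sketch:

    import org.apache.hadoop.has.common.HasException;
    import org.apache.kerby.kerberos.kerb.type.base.AuthToken;

    public class PluginLookupExample {
        // Assumes a plugin jar on the classpath advertises login type "DUMMY";
        // the type and the surrounding class are illustrative only.
        public static AuthToken authenticate(AuthToken userToken) throws HasException {
            if (!HasServerPluginRegistry.registeredPlugin("DUMMY")) {
                throw new HasException("No plugin registered for login type DUMMY");
            }
            HasServerPlugin plugin = HasServerPluginRegistry.createPlugin("DUMMY");
            return plugin.authenticate(userToken);
        }
    }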

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java
deleted file mode 100644
index b49c255..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java
+++ /dev/null
@@ -1,382 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.admin;
-
-import org.apache.hadoop.has.common.HasAdmin;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.HasServer;
-import org.apache.hadoop.has.server.web.HostRoleType;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadmin;
-import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadminImpl;
-import org.apache.kerby.kerberos.kerb.common.KrbUtil;
-import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
-import org.apache.kerby.kerberos.kerb.request.KrbIdentity;
-import org.apache.kerby.kerberos.kerb.server.KdcConfig;
-import org.apache.kerby.kerberos.kerb.server.KdcSetting;
-import org.apache.kerby.kerberos.kerb.server.KdcUtil;
-import org.apache.kerby.kerberos.kerb.server.ServerSetting;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-
-public class LocalHasAdmin implements HasAdmin {
-    public static final Logger LOG = LoggerFactory.getLogger(LocalHasAdmin.class);
-
-    private final ServerSetting serverSetting;
-    private File confDir;
-
-    public LocalHasAdmin(HasServer hasServer) throws KrbException {
-        if (hasServer.getKdcServer() == null) {
-            throw new RuntimeException("Could not get HAS KDC server, please start KDC first.");
-        }
-        this.serverSetting = hasServer.getKdcServer().getKdcSetting();
-    }
-
-    /**
-     * Construct with prepared conf dir.
-     *
-     * @param confDir The path of conf dir
-     * @throws KrbException e
-     */
-    public LocalHasAdmin(File confDir) throws KrbException {
-        this.confDir = confDir;
-        KdcConfig tmpKdcConfig = KdcUtil.getKdcConfig(confDir);
-        if (tmpKdcConfig == null) {
-            tmpKdcConfig = new KdcConfig();
-        }
-
-        BackendConfig tmpBackendConfig = KdcUtil.getBackendConfig(confDir);
-        if (tmpBackendConfig == null) {
-            tmpBackendConfig = new BackendConfig();
-        }
-
-        this.serverSetting = new KdcSetting(tmpKdcConfig, tmpBackendConfig);
-    }
-
-    @Override
-    public List<String> getPrincipals(String exp) throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        List<String> princs = null;
-        LOG.info("The value of exp is : " + exp);
-        if (exp == null || exp.equals("")) {
-            try {
-                princs = kadmin.getPrincipals();
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-        } else {
-            try {
-                princs = kadmin.getPrincipals(exp);
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-        }
-        return princs;
-    }
-
-    @Override
-    public void addPrincipal(String principal, String password) throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        if (principal == null) {
-            throw new HasException("Value of principal is null.");
-        }
-        if (password == null || password.equals("")) {
-            try {
-                kadmin.addPrincipal(principal);
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-        } else {
-            try {
-                kadmin.addPrincipal(principal, password);
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-        }
-        LOG.info("Success to add principal :" + principal);
-    }
-
-    @Override
-    public void deletePrincipal(String principal) throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-             throw new HasException(e);
-        }
-        if (principal == null) {
-            throw new IllegalArgumentException("Value of principal is null.");
-        }
-        try {
-            kadmin.deletePrincipal(principal);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        LOG.info("Success to delete principal :" + principal);
-    }
-
-    @Override
-    public void renamePrincipal(String oldPrincipal, String newPrincipal) throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        try {
-            kadmin.renamePrincipal(oldPrincipal, newPrincipal);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        LOG.info("Success to rename principal : \"" + oldPrincipal
-                + "\" to \"" + newPrincipal + "\".");
-    }
-
-    @Override
-    public String addPrincByRole(String host, String role) throws HasException {
-        String result = "";
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        String releam = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
-        String[] princs = HostRoleType.valueOf(role).getPrincs();
-        if (princs == null) {
-            LOG.error("Cannot find the role of : " + role);
-            return "Cannot find the role of : " + role;
-        }
-        for (String princ : princs) {
-            try {
-                kadmin.addPrincipal(princ + releam);
-                LOG.info("Success to add princ :" + princ + releam);
-                result = result + "Success to add princ :" + princ + releam + "\n";
-            } catch (KrbException e) {
-                LOG.info(e.getMessage());
-                result = e.getMessage() + "\n";
-            }
-        }
-        return result;
-    }
-
-    @Override
-    public File getKeytabByHostAndRole(String host, String role) throws HasException {
-        LocalKadmin kadmin;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        String realm = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
-        File path = new File("/tmp/" + System.currentTimeMillis());
-        path.mkdirs();
-        File keytab = new File(path, role + "-" + host + ".keytab");
-        if (keytab.exists()) {
-            keytab.delete();
-        }
-        String[] princs = HostRoleType.valueOf(role).getPrincs();
-        for (String princ : princs) {
-            try {
-                if (kadmin.getPrincipal(princ + realm) == null) {
-                    continue;
-                }
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-            try {
-                kadmin.exportKeytab(keytab, princ + realm);
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-        }
-        return keytab;
-    }
-
-    public void getKeytabByHostAndRole(String host, String role, File keytab) throws HasException {
-        LocalKadmin kadmin;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        String realm = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
-        if (keytab.exists()) {
-            keytab.delete();
-        }
-        String[] princs = HostRoleType.valueOf(role).getPrincs();
-        for (String princ : princs) {
-            try {
-                if (kadmin.getPrincipal(princ + realm) == null) {
-                    continue;
-                }
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-            try {
-                kadmin.exportKeytab(keytab, princ + realm);
-                System.out.println("Success to export keytab : " + keytab.getAbsolutePath());
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-        }
-    }
-
-    @Override
-    public List<String> getPrincipals() throws HasException {
-        LocalKadmin kadmin;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        try {
-            return kadmin.getPrincipals();
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-    }
-
-    public KrbIdentity getPrincipal(String principalName) throws HasException {
-        LocalKadmin kadmin;
-        KrbIdentity identity;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        try {
-            identity = kadmin.getPrincipal(principalName);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        return identity;
-    }
-
-    @Override
-    public void addPrincipal(String principal) throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        if (principal == null) {
-            throw new HasException("Value of principal is null.");
-        }
-        try {
-            kadmin.addPrincipal(principal);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        LOG.info("Success to add principal :" + principal);
-    }
-
-    @Override
-    public String getHadminPrincipal() {
-        return KrbUtil.makeKadminPrincipal(serverSetting.getKdcRealm()).getName();
-    }
-
-    /**
-     * get size of principal
-     */
-    @Override
-    public int size() throws HasException {
-        return this.getPrincipals().size();
-    }
-
-    @Override
-    public void setEnableOfConf(String isEnable) throws HasException {
-        File hasConf = new File(confDir, "has-server.conf");
-        if (!hasConf.exists()) {
-            System.err.println("has-server.conf is not exists.");
-            return;
-        }
-        try {
-            HasUtil.setEnableConf(hasConf, isEnable);
-        } catch (IOException e) {
-            System.err.println(e.getMessage());
-            return;
-        }
-    }
-
-    @Override
-    public void exportKeytab(File keytabFile, String principal)
-        throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        try {
-            kadmin.exportKeytab(keytabFile, principal);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-    }
-
-    @Override
-    public void exportKeytab(File keytabFile, List<String> principals)
-            throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        try {
-            kadmin.exportKeytab(keytabFile, principals);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-    }
-
-    public void getHostRoles() {
-        for (HostRoleType role : HostRoleType.values()) {
-            System.out.print("\tHostRole: " + role.getName()
-                    + ", PrincipalNames: ");
-            String[] princs = role.getPrincs();
-            for (int j = 0; j < princs.length; j++) {
-                System.out.print(princs[j]);
-                if (j == princs.length - 1) {
-                    System.out.println();
-                } else {
-                    System.out.print(", ");
-                }
-            }
-        }
-    }
-}
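
For orientation, the admin class deleted above exposes principal and keytab management as plain method calls. The sketch below is not part of the commit; it assumes the class is the LocalHasAdmin used by the server tools, that an instance has already been constructed (the constructor sits outside this hunk), and it only calls methods visible in the diff above.

    import java.io.File;
    import java.util.List;

    import org.apache.hadoop.has.common.HasException;
    import org.apache.hadoop.has.server.admin.LocalHasAdmin;

    public class HadminUsageSketch {
        // 'hadmin' is assumed to be an already-initialized admin instance.
        static void demo(LocalHasAdmin hadmin) throws HasException {
            hadmin.addPrincipal("alice");                                 // add a principal
            List<String> principals = hadmin.getPrincipals();             // list all principals
            System.out.println("principals: " + principals);
            hadmin.exportKeytab(new File("/tmp/alice.keytab"), "alice");  // export alice's keys
            hadmin.getHostRoles();                                        // print host roles and principal names
        }
    }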

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java
deleted file mode 100644
index f880c48..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java
+++ /dev/null
@@ -1,315 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.kdc;
-
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.HasServer;
-import org.apache.kerby.kerberos.kerb.KrbCodec;
-import org.apache.kerby.kerberos.kerb.KrbErrorCode;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.client.KrbContext;
-import org.apache.kerby.kerberos.kerb.common.EncryptionUtil;
-import org.apache.kerby.kerberos.kerb.common.KrbUtil;
-import org.apache.kerby.kerberos.kerb.server.KdcConfigKey;
-import org.apache.kerby.kerberos.kerb.server.KdcContext;
-import org.apache.kerby.kerberos.kerb.server.KdcRecoverableException;
-import org.apache.kerby.kerberos.kerb.server.KdcServer;
-import org.apache.kerby.kerberos.kerb.server.preauth.PreauthHandler;
-import org.apache.kerby.kerberos.kerb.server.request.AsRequest;
-import org.apache.kerby.kerberos.kerb.server.request.KdcRequest;
-import org.apache.kerby.kerberos.kerb.type.KerberosTime;
-import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
-import org.apache.kerby.kerberos.kerb.type.base.HostAddress;
-import org.apache.kerby.kerberos.kerb.type.base.HostAddresses;
-import org.apache.kerby.kerberos.kerb.type.base.KrbError;
-import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;
-import org.apache.kerby.kerberos.kerb.type.base.KrbToken;
-import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
-import org.apache.kerby.kerberos.kerb.type.base.TokenFormat;
-import org.apache.kerby.kerberos.kerb.type.kdc.AsReq;
-import org.apache.kerby.kerberos.kerb.type.kdc.KdcOption;
-import org.apache.kerby.kerberos.kerb.type.kdc.KdcOptions;
-import org.apache.kerby.kerberos.kerb.type.kdc.KdcReqBody;
-import org.apache.kerby.kerberos.kerb.type.pa.PaData;
-import org.apache.kerby.kerberos.kerb.type.pa.PaDataEntry;
-import org.apache.kerby.kerberos.kerb.type.pa.PaDataType;
-import org.apache.kerby.kerberos.kerb.type.pa.token.PaTokenRequest;
-import org.apache.kerby.kerberos.kerb.type.pa.token.TokenInfo;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class HasKdcHandler {
-    private static final Logger LOG = LoggerFactory.getLogger(HasKdcHandler.class);
-
-    private KdcContext kdcContext;
-    private KrbContext krbContext;
-    private KdcServer kdcServer;
-
-    /**
-     * Construct the handler from a HAS server instance.
-     *
-     * @param hasServer the HAS server
-     */
-    public HasKdcHandler(HasServer hasServer) {
-        this.krbContext = new KrbContext();
-        this.krbContext.init(hasServer.getKrbSetting());
-        this.kdcServer = hasServer.getKdcServer();
-        prepareHandler(kdcServer);
-    }
-
-    public KrbContext getKrbContext() {
-        return krbContext;
-    }
-
-    public KdcContext getKdcContext() {
-        return kdcContext;
-    }
-
-    private KdcServer getKdcServer() {
-        return kdcServer;
-    }
-
-    private void prepareHandler(KdcServer kdcServer) {
-        this.kdcContext = new KdcContext(kdcServer.getKdcSetting());
-        this.kdcContext.setIdentityService(kdcServer.getIdentityService());
-        PreauthHandler preauthHandler = new PreauthHandler();
-        preauthHandler.init();
-        this.kdcContext.setPreauthHandler(preauthHandler);
-    }
-
-    private String getAudience(String name) {
-        return name + "/" + getKdcContext().getKdcRealm() + "@" + getKdcContext().getKdcRealm();
-    }
-
-    public KrbMessage getResponse(AuthToken authToken, String passPhrase) {
-        KrbMessage krbMessage = null;
-        try {
-            krbMessage = handleMessage(authToken, passPhrase);
-        } catch (KrbException e) {
-            LOG.error("Failed to handle message. " + e.getMessage());
-        }
-        return krbMessage;
-    }
-
-    /**
-     * Process the client request message.
-     */
-    public KrbMessage handleMessage(AuthToken authToken, String passPhrase) throws KrbException {
-
-        // set the audiences
-        List<String> auds = new ArrayList<String>();
-        String audience = getAudience("krbtgt");
-        auds.add(audience);
-        authToken.setAudiences(auds);
-
-        AsReq asReq = createAsReq(authToken);
-        KdcRequest kdcRequest = new AsRequest(asReq, kdcContext);
-        kdcRequest.setHttps(true);
-        List<EncryptionType> requestedTypes = getEncryptionTypes();
-        EncryptionType bestType = EncryptionUtil.getBestEncryptionType(requestedTypes,
-                kdcContext.getConfig().getEncryptionTypes());
-
-        if (bestType == null) {
-            LOG.error("Can't get the best encryption type.");
-            throw new KrbException(KrbErrorCode.KDC_ERR_ETYPE_NOSUPP);
-        }
-
-        PrincipalName clientPrincipal = new PrincipalName(authToken.getSubject());
-        String clientRealm = asReq.getReqBody().getRealm();
-        if (clientRealm == null || clientRealm.isEmpty()) {
-            clientRealm = getKdcContext().getKdcRealm();
-        }
-        clientPrincipal.setRealm(clientRealm);
-
-        // Set the client key
-        EncryptionKey clientKey = HasUtil.getClientKey(clientPrincipal.getName(),
-            passPhrase, bestType);
-        kdcRequest.setClientKey(clientKey);
-
-        // Set the token issuers
-        getKdcServer().getKdcConfig().setString(KdcConfigKey.TOKEN_ISSUERS, "has");
-
-        KrbMessage krbResponse;
-
-        try {
-            kdcRequest.process();
-            krbResponse = kdcRequest.getReply();
-        } catch (KrbException e) {
-            LOG.error("Error occurred when request tgt. " + e.getMessage());
-            if (e instanceof KdcRecoverableException) {
-                krbResponse = handleRecoverableException(
-                        (KdcRecoverableException) e, kdcRequest);
-            } else {
-                KrbError krbError = new KrbError();
-                krbError.setStime(KerberosTime.now());
-                krbError.setSusec(100);
-                if (e.getKrbErrorCode() != null) {
-                    krbError.setErrorCode(e.getKrbErrorCode());
-                } else {
-                    krbError.setErrorCode(KrbErrorCode.UNKNOWN_ERR);
-                }
-                krbError.setCrealm(kdcContext.getKdcRealm());
-                if (kdcRequest.getClientPrincipal() != null) {
-                    krbError.setCname(kdcRequest.getClientPrincipal());
-                }
-                krbError.setRealm(kdcContext.getKdcRealm());
-                if (kdcRequest.getServerPrincipal() != null) {
-                    krbError.setSname(kdcRequest.getServerPrincipal());
-                } else {
-                    PrincipalName serverPrincipal = kdcRequest.getKdcReq().getReqBody().getSname();
-                    serverPrincipal.setRealm(kdcRequest.getKdcReq().getReqBody().getRealm());
-                    krbError.setSname(serverPrincipal);
-                }
-                if (KrbErrorCode.KRB_AP_ERR_BAD_INTEGRITY.equals(e.getKrbErrorCode())) {
-                    krbError.setEtext("PREAUTH_FAILED");
-                } else {
-                    krbError.setEtext(e.getMessage());
-                }
-                krbResponse = krbError;
-            }
-        }
-        return krbResponse;
-    }
-
-    /**
-     * Process the recoverable exception.
-     *
-     * @param e The exception returned by the KDC
-     * @param kdcRequest kdc request
-     * @return The KrbError
-     */
-    private KrbMessage handleRecoverableException(KdcRecoverableException e,
-                                                  KdcRequest kdcRequest)
-            throws KrbException {
-        LOG.info("KRB error occurred while processing request:"
-                + e.getMessage());
-
-        KrbError error = e.getKrbError();
-        error.setStime(KerberosTime.now());
-        error.setSusec(100);
-        error.setErrorCode(e.getKrbError().getErrorCode());
-        error.setRealm(kdcContext.getKdcRealm());
-        if (kdcRequest != null) {
-            error.setSname(kdcRequest.getKdcReq().getReqBody().getCname());
-        } else {
-            error.setSname(new PrincipalName("NONE"));
-        }
-        error.setEtext(e.getMessage());
-        return error;
-    }
-
-    public AsReq createAsReq(AuthToken authToken) throws KrbException {
-        AsReq asReq = new AsReq();
-        KdcReqBody body = makeReqBody();
-        asReq.setReqBody(body);
-
-        PaTokenRequest tokenPa = new PaTokenRequest();
-        KrbToken krbToken = new KrbToken(authToken, TokenFormat.JWT);
-        tokenPa.setToken(krbToken);
-        TokenInfo info = new TokenInfo();
-        info.setTokenVendor(authToken.getIssuer());
-        tokenPa.setTokenInfo(info);
-
-        PaDataEntry paDataEntry = new PaDataEntry();
-        paDataEntry.setPaDataType(PaDataType.TOKEN_REQUEST);
-        paDataEntry.setPaDataValue(KrbCodec.encode(tokenPa));
-
-        PaData paData = new PaData();
-        paData.addElement(paDataEntry);
-        asReq.setPaData(paData);
-        return asReq;
-    }
-
-    /**
-     * Create the KdcReqBody
-     *
-     * @return KdcReqBody
-     *
-     * @throws KrbException e
-     */
-    protected KdcReqBody makeReqBody() throws KrbException {
-        KdcReqBody body = new KdcReqBody();
-
-        long startTime = System.currentTimeMillis();
-        body.setFrom(new KerberosTime(startTime));
-
-        // Set the client principal to null
-        PrincipalName cName = null;
-        body.setCname(cName);
-
-        body.setRealm(getKrbContext().getKrbSetting().getKdcRealm());
-
-        PrincipalName sName = getServerPrincipal();
-        body.setSname(sName);
-
-        body.setTill(new KerberosTime(startTime + krbContext.getTicketValidTime()));
-
-        int nonce = krbContext.generateNonce();
-        body.setNonce(nonce);
-//        setChosenNonce(nonce);
-
-        body.setKdcOptions(getKdcOptions());
-
-        HostAddresses addresses = getHostAddresses();
-        if (addresses != null) {
-            body.setAddresses(addresses);
-        }
-
-        body.setEtypes(getEncryptionTypes());
-
-        return body;
-    }
-
-    private PrincipalName getServerPrincipal() {
-        return KrbUtil.makeTgsPrincipal(getKrbContext().getKrbSetting().getKdcRealm());
-    }
-
-    private KdcOptions getKdcOptions() {
-        KdcOptions kdcOptions = new KdcOptions();
-        // By default enforce these flags
-        kdcOptions.setFlag(KdcOption.FORWARDABLE);
-        kdcOptions.setFlag(KdcOption.PROXIABLE);
-        kdcOptions.setFlag(KdcOption.RENEWABLE_OK);
-        return kdcOptions;
-    }
-
-    public HostAddresses getHostAddresses() {
-        List<HostAddress> hostAddresses = new ArrayList<HostAddress>();
-        HostAddresses addresses = null;
-        //empty
-        if (!hostAddresses.isEmpty()) {
-            addresses = new HostAddresses();
-            for (HostAddress ha : hostAddresses) {
-                addresses.addElement(ha);
-            }
-        }
-        return addresses;
-    }
-
-    public List<EncryptionType> getEncryptionTypes() {
-        List<EncryptionType> encryptionTypes = krbContext.getConfig().getEncryptionTypes();
-        return EncryptionUtil.orderEtypesByStrength(encryptionTypes);
-    }
-}
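
The handler above is constructed from a HasServer and turns a verified AuthToken plus pass phrase into an AS exchange against the embedded KDC, returning either an AS-REP or a KrbError. A minimal caller sketch, assuming hasServer and authToken are supplied by the surrounding server code:

    import org.apache.hadoop.has.server.HasServer;
    import org.apache.hadoop.has.server.kdc.HasKdcHandler;
    import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
    import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;

    public class KdcHandlerSketch {
        static KrbMessage issueTgt(HasServer hasServer, AuthToken authToken, String passPhrase) {
            // Builds the KrbContext/KdcContext from the running server.
            HasKdcHandler handler = new HasKdcHandler(hasServer);
            // Returns an AS-REP on success, or a KrbError if the request fails.
            return handler.getResponse(authToken, passPhrase);
        }
    }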

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java
deleted file mode 100644
index 3f397fb..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.kdc;
-
-import org.apache.kerby.config.ConfigKey;
-
-/**
- * Define all the MySQL backend related configuration items with default values.
- */
-public enum MySQLConfKey implements ConfigKey {
-    MYSQL_DRIVER("com.mysql.jdbc.Driver"),
-    MYSQL_URL("jdbc:mysql://127.0.0.1:3306/mysqlbackend"),
-    MYSQL_USER("root"),
-    MYSQL_PASSWORD("passwd");
-
-    private Object defaultValue;
-
-    MySQLConfKey() {
-        this.defaultValue = null;
-    }
-
-    MySQLConfKey(Object defaultValue) {
-        this.defaultValue = defaultValue;
-    }
-
-    @Override
-    public String getPropertyKey() {
-        return name().toLowerCase();
-    }
-
-    @Override
-    public Object getDefaultValue() {
-        return this.defaultValue;
-    }
-}
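
Each constant maps to a property key by lower-casing its name (see getPropertyKey above), so a backend configuration carries entries named mysql_driver, mysql_url, mysql_user and mysql_password, with the defaults shown as fallbacks. A small illustrative sketch, assuming the enum is on the classpath:

    import org.apache.hadoop.has.server.kdc.MySQLConfKey;

    public class MySQLConfKeySketch {
        public static void main(String[] args) {
            System.out.println(MySQLConfKey.MYSQL_URL.getPropertyKey());   // mysql_url
            System.out.println(MySQLConfKey.MYSQL_URL.getDefaultValue());  // jdbc:mysql://127.0.0.1:3306/mysqlbackend
        }
    }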

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java
deleted file mode 100644
index ec02312..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java
+++ /dev/null
@@ -1,426 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.kdc;
-
-import org.apache.commons.dbutils.DbUtils;
-import org.apache.directory.api.util.GeneralizedTime;
-import org.apache.kerby.config.Config;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.identity.backend.AbstractIdentityBackend;
-import org.apache.kerby.kerberos.kerb.request.KrbIdentity;
-import org.apache.kerby.kerberos.kerb.type.KerberosTime;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.sql.ResultSet;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import javax.sql.rowset.serial.SerialBlob;
-import java.util.Map;
-import java.util.List;
-import java.util.ArrayList;
-import java.text.ParseException;
-
-/**
- * A MySQL based backend implementation.
- */
-public class MySQLIdentityBackend extends AbstractIdentityBackend {
-    private Connection connection;
-    private String driver;
-    private String url;
-    private String user;
-    private String password;
-    private static final Logger LOG = LoggerFactory.getLogger(MySQLIdentityBackend.class);
-    private String identityTable;
-    private String keyInfoTable;
-
-    /**
-     * Construct an instance using the specified config, which contains
-     * everything needed to initialize the MySQL backend.
-     * @param config the configuration used to initialize the backend
-     */
-    public MySQLIdentityBackend(final Config config) {
-        setConfig(config);
-    }
-
-    public MySQLIdentityBackend() { }
-
-    /**
-     * Start the MySQL connection.
-     */
-    private void startConnection() throws KrbException {
-        try {
-            Class.forName(driver);
-            connection = DriverManager.getConnection(url, user, password);
-            if (!connection.isClosed()) {
-                LOG.info("Succeeded in connecting to MySQL.");
-            }
-        } catch (ClassNotFoundException e) {
-            throw new KrbException("JDBC Driver Class not found. ", e);
-        } catch (SQLException e) {
-            throw new KrbException("Failed to connecting to MySQL. ", e);
-        }
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected void doInitialize() throws KrbException {
-        LOG.info("Initializing the MySQL identity backend.");
-        driver = getConfig().getString(MySQLConfKey.MYSQL_DRIVER, true);
-        user = getConfig().getString(MySQLConfKey.MYSQL_USER, true);
-        password = getConfig().getString(MySQLConfKey.MYSQL_PASSWORD, true);
-
-        String urlString = getConfig().getString(MySQLConfKey.MYSQL_URL, true);
-        if (urlString == null || urlString.isEmpty()) {
-            urlString = getBackendConfig().getString(MySQLConfKey.MYSQL_URL, true);
-        }
-        url = urlString;
-
-        PreparedStatement preInitialize = null;
-        PreparedStatement preKdcRealm = null;
-        ResultSet resKdcRealm = null;
-        PreparedStatement preIdentity = null;
-        PreparedStatement preKey = null;
-        try {
-            startConnection();
-
-            // Set initialized for kdc config
-            String stmInitialize = "UPDATE `kdc_config` SET initialized = true WHERE id = 1";
-            preInitialize = connection.prepareStatement(stmInitialize);
-            preInitialize.executeUpdate();
-
-            // Get identity table name according to realm of kdc
-            String stmKdcRealm = "SELECT realm FROM `kdc_config`";
-            preKdcRealm = connection.prepareStatement(stmKdcRealm);
-            resKdcRealm = preKdcRealm.executeQuery();
-            if (resKdcRealm.next()) {
-                String realm = resKdcRealm.getString("realm").toLowerCase();
-                identityTable = "`" + realm + "_identity" + "`";
-                keyInfoTable = "`" + realm + "_key" + "`";
-            } else {
-                throw new KrbException("Failed to get kdc config.");
-            }
-
-            // Create identity table
-            String stmIdentity = "CREATE TABLE IF NOT EXISTS " + identityTable
-                + " (principal varchar(255) NOT NULL, key_version INTEGER "
-                + "DEFAULT 1, kdc_flags INTEGER DEFAULT 0, disabled bool "
-                + "DEFAULT NULL, locked bool DEFAULT NULL, expire_time "
-                + "VARCHAR(255) DEFAULT NULL, created_time VARCHAR(255) "
-                + "DEFAULT NULL, PRIMARY KEY (principal) ) ENGINE=INNODB;";
-            preIdentity = connection.prepareStatement(stmIdentity);
-            preIdentity.executeUpdate();
-
-            // Create key table
-            String stmKey = "CREATE TABLE IF NOT EXISTS " + keyInfoTable
-                + " (key_id INTEGER NOT NULL AUTO_INCREMENT, key_type "
-                + "VARCHAR(255) DEFAULT NULL, kvno INTEGER DEFAULT -1, "
-                + "key_value BLOB DEFAULT NULL, principal VARCHAR(255) NOT NULL,"
-                + "PRIMARY KEY (key_id), INDEX (principal), FOREIGN KEY "
-                + "(principal) REFERENCES " + identityTable + "(principal) "
-                + ") ENGINE=INNODB;";
-            preKey = connection.prepareStatement(stmKey);
-            preKey.executeUpdate();
-
-        } catch (SQLException e) {
-            LOG.error("Error occurred while initialize MySQL backend." + e.toString());
-            throw new KrbException("Failed to create table in database. ", e);
-        } finally {
-            DbUtils.closeQuietly(preInitialize);
-            DbUtils.closeQuietly(preKdcRealm);
-            DbUtils.closeQuietly(resKdcRealm);
-            DbUtils.closeQuietly(preIdentity);
-            DbUtils.closeQuietly(preKey);
-            DbUtils.closeQuietly(connection);
-        }
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected void doStop() throws KrbException {
-        try {
-            closeConnection();
-            if (connection.isClosed()) {
-                LOG.info("Succeeded in closing connection with MySQL.");
-            }
-        } catch (SQLException e) {
-            LOG.error("Failed to close connection with MySQL.");
-            throw new KrbException("Failed to close connection with MySQL. ", e);
-        }
-    }
-
-    /**
-     * Close the connection for stop().
-     * @throws SQLException if SQLException handled
-     */
-    private void closeConnection() throws SQLException {
-        if (!connection.isClosed()) {
-            connection.close();
-        }
-    }
-
-    /**
-     * Convert a KerberosTime type object to a generalized time form of String.
-     * @param kerberosTime The kerberos time to convert
-     */
-    private String toGeneralizedTime(final KerberosTime kerberosTime) {
-        GeneralizedTime generalizedTime = new GeneralizedTime(kerberosTime.getValue());
-        return generalizedTime.toString();
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected KrbIdentity doAddIdentity(KrbIdentity identity) throws KrbException {
-        String principalName = identity.getPrincipalName();
-        int keyVersion = identity.getKeyVersion();
-        int kdcFlags = identity.getKdcFlags();
-        boolean disabled = identity.isDisabled();
-        boolean locked = identity.isLocked();
-        String createdTime = toGeneralizedTime(identity.getCreatedTime());
-        String expireTime = toGeneralizedTime(identity.getExpireTime());
-        Map<EncryptionType, EncryptionKey> keys = identity.getKeys();
-
-        PreparedStatement preIdentity = null;
-        PreparedStatement preKey = null;
-
-        KrbIdentity duplicateIdentity = doGetIdentity(principalName);
-        if (duplicateIdentity != null) {
-            LOG.warn("The identity maybe duplicate.");
-
-            return duplicateIdentity;
-        } else {
-            try {
-                startConnection();
-                connection.setAutoCommit(false);
-
-                // Insert identity to identity table
-                String stmIdentity = "insert into " + identityTable + " values(?, ?, ?, ?, ?, ?, ?)";
-                preIdentity = connection.prepareStatement(stmIdentity);
-                preIdentity.setString(1, principalName);
-                preIdentity.setInt(2, keyVersion);
-                preIdentity.setInt(3, kdcFlags);
-                preIdentity.setBoolean(4, disabled);
-                preIdentity.setBoolean(5, locked);
-                preIdentity.setString(6, createdTime);
-                preIdentity.setString(7, expireTime);
-                preIdentity.executeUpdate();
-
-                // Insert keys to key table
-                for (Map.Entry<EncryptionType, EncryptionKey> entry : keys.entrySet()) {
-                    String stmKey = "insert into " + keyInfoTable + " (key_type, kvno, key_value, principal)"
-                        + " values(?, ?, ?, ?)";
-                    preKey = connection.prepareStatement(stmKey);
-                    preKey.setString(1, entry.getKey().getName());
-                    preKey.setInt(2, entry.getValue().getKvno());
-                    preKey.setBlob(3, new SerialBlob(entry.getValue().getKeyData()));
-                    preKey.setString(4, principalName);
-                    preKey.executeUpdate();
-                }
-
-                connection.commit();
-                return identity;
-            } catch (SQLException e) {
-                try {
-                    LOG.info("Transaction is being rolled back.");
-                    connection.rollback();
-                } catch (SQLException ex) {
-                    throw new KrbException("Transaction roll back failed. ", ex);
-                }
-                LOG.error("Error occurred while adding identity.");
-                throw new KrbException("Failed to add identity. ", e);
-            } finally {
-                DbUtils.closeQuietly(preIdentity);
-                DbUtils.closeQuietly(preKey);
-                doStop();
-            }
-        }
-    }
-
-    /**
-     * Create kerberos time.
-     * @param generalizedTime generalized time
-     * @throws ParseException parse exception
-     */
-    private KerberosTime createKerberosTime(final String generalizedTime) throws ParseException {
-        long time = new GeneralizedTime(generalizedTime).getTime();
-        return new KerberosTime(time);
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected KrbIdentity doGetIdentity(final String principalName) throws KrbException {
-        KrbIdentity krbIdentity = new KrbIdentity(principalName);
-
-        PreparedStatement preIdentity = null;
-        ResultSet resIdentity = null;
-        PreparedStatement preKey = null;
-        ResultSet resKey = null;
-        try {
-            startConnection();
-
-            // Get identity from identity table
-            String stmIdentity = "SELECT * FROM " + identityTable + " where principal = ?";
-            preIdentity = connection.prepareStatement(stmIdentity);
-            preIdentity.setString(1, principalName);
-            resIdentity = preIdentity.executeQuery();
-
-            if (!resIdentity.next()) {
-                return null;
-            }
-
-            // The principal column is the primary key, so at most one row matches;
-            // the cursor is already positioned on that row by the next() call above.
-            krbIdentity.setKeyVersion(resIdentity.getInt("key_version"));
-            krbIdentity.setKdcFlags(resIdentity.getInt("kdc_flags"));
-            krbIdentity.setDisabled(resIdentity.getBoolean("disabled"));
-            krbIdentity.setLocked(resIdentity.getBoolean("locked"));
-            krbIdentity.setCreatedTime(createKerberosTime(resIdentity.getString("created_time")));
-            krbIdentity.setExpireTime(createKerberosTime(resIdentity.getString("expire_time")));
-
-            // Get keys from key table
-            List<EncryptionKey> keys = new ArrayList<>();
-            String stmKey = "SELECT * FROM " + keyInfoTable + " where principal = ?";
-            preKey = connection.prepareStatement(stmKey);
-            preKey.setString(1, principalName);
-            resKey = preKey.executeQuery();
-            while (resKey.next()) {
-                int kvno = resKey.getInt("kvno");
-                String keyType = resKey.getString("key_type");
-                EncryptionType eType = EncryptionType.fromName(keyType);
-                byte[] keyValue = resKey.getBytes("key_value");
-                EncryptionKey key = new EncryptionKey(eType, keyValue, kvno);
-                keys.add(key);
-            }
-
-            krbIdentity.addKeys(keys);
-            return krbIdentity;
-        } catch (SQLException e) {
-            LOG.error("Error occurred while getting identity.");
-            throw new KrbException("Failed to get identity. ", e);
-        } catch (ParseException e) {
-            throw new KrbException("Failed to get identity. ", e);
-        } finally {
-            DbUtils.closeQuietly(preIdentity);
-            DbUtils.closeQuietly(resIdentity);
-            DbUtils.closeQuietly(preKey);
-            DbUtils.closeQuietly(resKey);
-            doStop();
-        }
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected KrbIdentity doUpdateIdentity(KrbIdentity identity) throws KrbException {
-        String principalName = identity.getPrincipalName();
-        try {
-            doDeleteIdentity(principalName); // Delete former identity
-            doAddIdentity(identity); // Insert new identity
-        } catch (KrbException e) {
-            LOG.error("Error occurred while updating identity: " + principalName);
-            throw new KrbException("Failed to update identity. ", e);
-        }
-
-        return getIdentity(principalName);
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected void doDeleteIdentity(String principalName) throws KrbException {
-        PreparedStatement preKey = null;
-        PreparedStatement preIdentity = null;
-        try {
-            startConnection();
-            connection.setAutoCommit(false);
-
-            // Delete keys from key table
-            String stmKey = "DELETE FROM  " + keyInfoTable + " where principal = ?";
-            preKey = connection.prepareStatement(stmKey);
-            preKey.setString(1, principalName);
-            preKey.executeUpdate();
-
-            // Delete identity from identity table
-            String stmIdentity = "DELETE FROM " + identityTable + " where principal = ? ";
-            preIdentity = connection.prepareStatement(stmIdentity);
-            preIdentity.setString(1, principalName);
-            preIdentity.executeUpdate();
-
-            connection.commit();
-        } catch (SQLException e) {
-            try {
-                LOG.info("Transaction is being rolled back.");
-                connection.rollback();
-            } catch (SQLException ex) {
-                throw new KrbException("Transaction roll back failed. ", ex);
-            }
-            LOG.error("Error occurred while deleting identity.");
-            throw new KrbException("Failed to delete identity. ", e);
-        } finally {
-            DbUtils.closeQuietly(preIdentity);
-            DbUtils.closeQuietly(preKey);
-            doStop();
-        }
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected Iterable<String> doGetIdentities() throws KrbException {
-        List<String> identityNames = new ArrayList<>();
-        PreparedStatement preSmt = null;
-        ResultSet result = null;
-        try {
-            startConnection();
-            String statement = "SELECT * FROM " + identityTable;
-            preSmt = connection.prepareStatement(statement);
-            result = preSmt.executeQuery();
-            while (result.next()) {
-                identityNames.add(result.getString("principal"));
-            }
-            result.close();
-            preSmt.close();
-        } catch (SQLException e) {
-            LOG.error("Error occurred while getting identities.");
-            throw new KrbException("Failed to get identities. ", e);
-        } finally {
-            DbUtils.closeQuietly(preSmt);
-            DbUtils.closeQuietly(result);
-            doStop();
-        }
-
-        return identityNames;
-    }
-}
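
The protected do* hooks above are driven through the public surface inherited from Kerby's AbstractIdentityBackend. The sketch below is only illustrative and rests on assumptions: that the usual initialize/addIdentity/getIdentity methods are exposed by the base class, that Conf#setString is available to build the configuration in code, and that a bare KrbIdentity (no keys yet) is acceptable for a smoke test; the property names follow MySQLConfKey.

    import org.apache.hadoop.has.server.kdc.MySQLIdentityBackend;
    import org.apache.kerby.config.Conf;
    import org.apache.kerby.kerberos.kerb.request.KrbIdentity;

    public class MySQLBackendSketch {
        public static void main(String[] args) throws Exception {
            Conf config = new Conf();                                      // assumed in-code config builder
            config.setString("mysql_driver", "com.mysql.jdbc.Driver");
            config.setString("mysql_url", "jdbc:mysql://127.0.0.1:3306/mysqlbackend");
            config.setString("mysql_user", "root");
            config.setString("mysql_password", "passwd");

            MySQLIdentityBackend backend = new MySQLIdentityBackend(config);
            backend.initialize();                                          // runs doInitialize(): creates the tables
            backend.addIdentity(new KrbIdentity("alice@EXAMPLE.COM"));     // runs doAddIdentity()
            KrbIdentity alice = backend.getIdentity("alice@EXAMPLE.COM");  // runs doGetIdentity()
            System.out.println(alice == null ? "not found" : alice.getPrincipalName());
        }
    }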

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java
deleted file mode 100644
index 78ce1e9..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java
+++ /dev/null
@@ -1,54 +0,0 @@
-package org.apache.hadoop.has.server.web;
-
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.HasServer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import java.io.File;
-import java.io.IOException;
-@Private
-@Unstable
-public class ConfFilter implements Filter {
-    public static final Logger LOG = LoggerFactory.getLogger(ConfFilter.class);
-    @Override
-    public void init(FilterConfig filterConfig) throws ServletException {
-
-    }
-
-    @Override
-    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse,
-                         FilterChain filterChain) throws IOException, ServletException {
-
-        final HasServer hasServer = WebServer.getHasServerFromContext(
-                servletRequest.getServletContext());
-        HasConfig hasConfig;
-        try {
-            hasConfig = HasUtil.getHasConfig(
-                    new File(hasServer.getConfDir(), "has-server.conf"));
-            String isEnableConf = hasConfig.getEnableConf();
-            if (!"true".equals(isEnableConf)) {
-                throw new RuntimeException("The configuration is disabled: the KDC has already started.");
-            }
-            filterChain.doFilter(servletRequest, servletResponse);
-        } catch (HasException e) {
-            LOG.error(e.getMessage());
-        }
-    }
-
-    @Override
-    public void destroy() {
-
-    }
-}
\ No newline at end of file
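
ConfFilter only lets configuration requests through while the enable flag in has-server.conf is "true"; the setEnableOfConf method earlier in this commit flips that flag with the same HasUtil helper. A minimal sketch of turning the flag off once configuration is finished (the conf directory path is a placeholder):

    import java.io.File;

    import org.apache.hadoop.has.common.util.HasUtil;

    public class DisableConfSketch {
        public static void main(String[] args) throws Exception {
            File hasConf = new File("/etc/has", "has-server.conf");  // hypothetical conf dir
            HasUtil.setEnableConf(hasConf, "false");                 // later conf requests are rejected by ConfFilter
        }
    }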


[04/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/kerby/has/server/json/TestJsonConfApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/kerby/has/server/json/TestJsonConfApi.java b/has/has-server/src/test/java/org/apache/kerby/has/server/json/TestJsonConfApi.java
new file mode 100644
index 0000000..f880e78
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/kerby/has/server/json/TestJsonConfApi.java
@@ -0,0 +1,83 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.server.json;
+
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.kerby.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import java.io.File;
+import java.io.IOException;
+import javax.ws.rs.core.MultivaluedMap;
+
+import static org.junit.Assert.assertEquals;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestJsonConfApi extends TestRestApiBase {
+
+    @Test
+    public void testSetPlugin() {
+        WebResource webResource = getWebResource("conf/setplugin");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("plugin", "RAM");
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("HAS plugin set successfully.\n", response);
+    }
+
+    @Test
+    public void testConfigKdcBackend() {
+        WebResource webResource = getWebResource("conf/configkdcbackend");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("backendType", "json");
+        String backend = null;
+        try {
+            backend = new File(testDir, "json-backend").getCanonicalPath();
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        params.add("dir", backend);
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("Json backend set successfully.\n", response);
+    }
+
+    @Test
+    public void testConfigXJsonKdc() {
+        WebResource webResource = getWebResource("conf/configkdc");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("realm", "HADOOP.COM");
+        params.add("host", "localhost");
+        params.add("port", "8866");
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("HAS server KDC set successfully.\n", response);
+    }
+
+    @Test
+    public void testGetKrb5Conf() {
+        getKrb5Conf();
+    }
+
+    @Test
+    public void testGetHasConf() {
+        getHasConf();
+    }
+}
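
The tests above drive the conf REST endpoints through a Jersey 1.x WebResource obtained from TestRestApiBase. The same setplugin call can be issued by a standalone client along these lines; the base URL is a placeholder, since the real address and any path prefix come from the test base class and the server configuration:

    import javax.ws.rs.core.MultivaluedMap;

    import com.sun.jersey.api.client.Client;
    import com.sun.jersey.api.client.WebResource;
    import com.sun.jersey.core.util.MultivaluedMapImpl;

    public class SetPluginClientSketch {
        public static void main(String[] args) {
            String baseUrl = "http://localhost:8091/";   // hypothetical HAS web address
            WebResource webResource = Client.create().resource(baseUrl + "conf/setplugin");
            MultivaluedMap<String, String> params = new MultivaluedMapImpl();
            params.add("plugin", "RAM");
            String response = webResource.queryParams(params).put(String.class);
            System.out.println(response);                // expected: "HAS plugin set successfully."
        }
    }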

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/kerby/has/server/json/TestJsonHadminApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/kerby/has/server/json/TestJsonHadminApi.java b/has/has-server/src/test/java/org/apache/kerby/has/server/json/TestJsonHadminApi.java
new file mode 100644
index 0000000..cbf86a7
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/kerby/has/server/json/TestJsonHadminApi.java
@@ -0,0 +1,80 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.server.json;
+
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.kerby.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import javax.ws.rs.core.MultivaluedMap;
+
+import static org.junit.Assert.assertEquals;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestJsonHadminApi extends TestRestApiBase {
+
+    @Test
+    public void testCreatePrincipals() {
+        createPrincipals();
+    }
+
+    @Test
+    public void testExportKeytabs() {
+        exportKeytabs();
+    }
+
+    @Test
+    public void testExportKeytab() {
+        exportKeytab();
+    }
+
+    @Test
+    public void testAddPrincipal() {
+        addPrincipal();
+    }
+
+    @Test
+    public void testGetPrincipals() {
+        getPrincipals();
+    }
+
+    @Test
+    public void testRenamePrincipal() {
+        renamePrincipal();
+    }
+
+    @Test
+    public void testXDeletePrincipal() {
+        deletePrincipal();
+    }
+
+    @Test
+    public void testSetConf() {
+        WebResource webResource = getWebResource("admin/setconf");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("isEnable", "true");
+        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/kerby/has/server/json/TestJsonHasApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/kerby/has/server/json/TestJsonHasApi.java b/has/has-server/src/test/java/org/apache/kerby/has/server/json/TestJsonHasApi.java
new file mode 100644
index 0000000..56fffd9
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/kerby/has/server/json/TestJsonHasApi.java
@@ -0,0 +1,54 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.server.json;
+
+import com.sun.jersey.api.client.WebResource;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.kerby.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import java.io.File;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestJsonHasApi extends TestRestApiBase {
+
+    @Test
+    public void hostRoles() {
+        WebResource webResource = getWebResource("hostroles");
+        String response = webResource.get(String.class);
+        System.out.println(response);
+    }
+
+    @Test
+    public void testKdcStart() {
+        kdcStart();
+        File backendDir = new File(testDir, "json-backend");
+        if (backendDir.exists()) {
+            FileUtil.fullyDelete(backendDir);
+        }
+    }
+
+    @Test
+    public void testKdcInit() {
+        kdcInit();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/kerby/has/server/mysql/TestMySQLConfApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/kerby/has/server/mysql/TestMySQLConfApi.java b/has/has-server/src/test/java/org/apache/kerby/has/server/mysql/TestMySQLConfApi.java
new file mode 100644
index 0000000..b5fe29c
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/kerby/has/server/mysql/TestMySQLConfApi.java
@@ -0,0 +1,70 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.server.mysql;
+
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.kerby.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import javax.ws.rs.core.MultivaluedMap;
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestMySQLConfApi extends TestRestApiBase {
+
+    @Test
+    public void testConfigKdcBackend() throws IOException {
+        WebResource webResource = getWebResource("conf/configkdcbackend");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("backendType", "mysql");
+        params.add("driver", "org.h2.Driver");
+        params.add("url", "jdbc:h2:" + testDir.getCanonicalPath() + "/mysql-backend/mysqlbackend;MODE=MySQL");
+        params.add("user", "root");
+        params.add("password", "123456");
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("MySQL backend set successfully.\n", response);
+    }
+
+    @Test
+    public void testConfigMySQLKdc() {
+        WebResource webResource = getWebResource("conf/configkdc");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("realm", "HADOOP.COM");
+        params.add("host", "localhost");
+        params.add("port", "8899");
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("HAS server KDC set successfully.\n", response);
+    }
+
+    @Test
+    public void testGetKrb5Conf() {
+        getKrb5Conf();
+    }
+
+    @Test
+    public void testGetHasConf() {
+        getHasConf();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/kerby/has/server/mysql/TestMySQLHadminApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/kerby/has/server/mysql/TestMySQLHadminApi.java b/has/has-server/src/test/java/org/apache/kerby/has/server/mysql/TestMySQLHadminApi.java
new file mode 100644
index 0000000..ba31f06
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/kerby/has/server/mysql/TestMySQLHadminApi.java
@@ -0,0 +1,64 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.server.mysql;
+
+import org.apache.kerby.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestMySQLHadminApi extends TestRestApiBase {
+
+    @Test
+    public void testCreatePrincipals() {
+        createPrincipals();
+    }
+
+    @Test
+    public void testExportKeytabs() {
+        exportKeytabs();
+    }
+
+    @Test
+    public void testExportKeytab() {
+        exportKeytab();
+    }
+
+    @Test
+    public void testAddPrincipal() {
+        addPrincipal();
+    }
+
+    @Test
+    public void testGetPrincipals() {
+        getPrincipals();
+    }
+
+    @Test
+    public void testRenamePrincipal() {
+        renamePrincipal();
+    }
+
+    @Test
+    public void testXDeletePrincipal() {
+        deletePrincipal();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/test/java/org/apache/kerby/has/server/mysql/TestMySQLHasApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/kerby/has/server/mysql/TestMySQLHasApi.java b/has/has-server/src/test/java/org/apache/kerby/has/server/mysql/TestMySQLHasApi.java
new file mode 100644
index 0000000..fbe01ab
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/kerby/has/server/mysql/TestMySQLHasApi.java
@@ -0,0 +1,46 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.server.mysql;
+
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.kerby.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import java.io.File;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestMySQLHasApi extends TestRestApiBase {
+
+    @Test
+    public void testKdcStart() {
+        kdcStart();
+        File backendDir = new File(testDir, "mysql-backend");
+        if (backendDir.exists()) {
+            FileUtil.fullyDelete(backendDir);
+        }
+    }
+
+    @Test
+    public void testKdcInit() {
+        kdcInit();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/pom.xml b/has/has-tool/has-client-tool/pom.xml
index 7ded9e1..adb6852 100644
--- a/has/has-tool/has-client-tool/pom.xml
+++ b/has/has-tool/has-client-tool/pom.xml
@@ -4,7 +4,7 @@
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <parent>
         <artifactId>has-tool</artifactId>
-        <groupId>org.apache.hadoop</groupId>
+        <groupId>org.apache.kerby</groupId>
         <version>1.0.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
@@ -13,7 +13,7 @@
 
     <dependencies>
       <dependency>
-        <groupId>org.apache.hadoop</groupId>
+        <groupId>org.apache.kerby</groupId>
         <artifactId>has-client</artifactId>
         <version>${project.version}</version>
       </dependency>
@@ -23,7 +23,7 @@
         <version>${kerby.version}</version>
       </dependency>
       <dependency>
-        <groupId>org.apache.hadoop</groupId>
+        <groupId>org.apache.kerby</groupId>
         <artifactId>has-plugins</artifactId>
         <version>${project.version}</version>
       </dependency>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/HadminRemoteTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/HadminRemoteTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/HadminRemoteTool.java
deleted file mode 100644
index 8756420..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/HadminRemoteTool.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.hadmin.remote;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.hadoop.has.client.HasAuthAdminClient;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HadminRemoteCmd;
-import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteAddPrincipalCmd;
-import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteCreatePrincipalsCmd;
-import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteDeletePrincipalCmd;
-import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteDisableConfCmd;
-import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteEnableConfCmd;
-import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteExportKeytabsCmd;
-import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteGetHostRolesCmd;
-import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteGetPrincipalsCmd;
-import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteRenamePrincipalCmd;
-import org.apache.kerby.util.OSUtil;
-
-import java.io.File;
-import java.util.Scanner;
-
-public class HadminRemoteTool {
-
-    private static final String PROMPT = HadminRemoteTool.class.getSimpleName() + ".remote";
-    private static final String USAGE = (OSUtil.isWindows()
-        ? "Usage: bin\\hadmin-remote.cmd" : "Usage: sh bin/hadmin-remote.sh")
-        + " <conf-file>\n"
-        + "\tExample:\n"
-        + "\t\t"
-        + (OSUtil.isWindows()
-        ? "bin\\hadmin-remote.cmd" : "sh bin/hadmin-remote.sh")
-        + " conf\n";
-
-    private static final String LEGAL_COMMANDS = "Available commands are: "
-        + "\n"
-        + "add_principal, addprinc\n"
-        + "                         Add principal\n"
-        + "delete_principal, delprinc\n"
-        + "                         Delete principal\n"
-        + "rename_principal, renprinc\n"
-        + "                         Rename principal\n"
-        + "list_principals, listprincs\n"
-        + "                         List principals\n"
-        + "get_hostroles, hostroles\n"
-        + "                         Get hostRoles\n"
-        + "export_keytabs, expkeytabs\n"
-        + "                         Export keytabs\n"
-        + "create_principals, creprincs\n"
-        + "                         Create principals\n"
-        + "enable_configure, enable\n"
-        + "                         Enable configure\n"
-        + "disable_configure, disable\n"
-        + "                         Disable configure\n";
-
-    public static void main(String[] args) {
-        HasAdminClient hadmin;
-        HasAuthAdminClient authHasAdminClient = null;
-
-        if (args.length < 1) {
-            System.err.println(USAGE);
-            System.exit(1);
-        }
-
-        String confDirPath = args[0];
-        File confFile = new File(confDirPath, "hadmin.conf");
-        HasConfig hasConfig;
-        try {
-            hasConfig = HasUtil.getHasConfig(confFile);
-        } catch (HasException e) {
-            System.err.println(e.getMessage());
-            return;
-        }
-
-        hadmin = new HasAdminClient(hasConfig);
-
-        if (hasConfig.getFilterAuthType().equals("kerberos")) {
-            authHasAdminClient = new HasAuthAdminClient(hasConfig);
-        }
-
-        System.out.println("enter \"cmd\" to see legal commands.");
-        System.out.print(PROMPT + ": ");
-
-        try (Scanner scanner = new Scanner(System.in, "UTF-8")) {
-            String input = scanner.nextLine();
-
-            while (!(input.equals("quit") || input.equals("exit") || input.equals("q"))) {
-                try {
-                    execute(hadmin, authHasAdminClient, input);
-                } catch (HasException e) {
-                    System.err.println(e.getMessage());
-                }
-                System.out.print(PROMPT + ": ");
-                input = scanner.nextLine();
-            }
-        }
-    }
-
-    private static void execute(HasAdminClient hadmin, HasAuthAdminClient hasAuthAdminClient,
-                               String input) throws HasException {
-        input = input.trim();
-        if (input.startsWith("cmd")) {
-            System.out.println(LEGAL_COMMANDS);
-            return;
-        }
-        HadminRemoteCmd executor;
-
-        String[] items = input.split("\\s+");
-        String cmd = items[0];
-
-        if (cmd.equals("add_principal")
-            || cmd.equals("addprinc")) {
-            executor = new HasRemoteAddPrincipalCmd(hadmin, hasAuthAdminClient);
-        } else if (cmd.equals("delete_principal")
-            || cmd.equals("delprinc")) {
-            executor = new HasRemoteDeletePrincipalCmd(hadmin, hasAuthAdminClient);
-        } else if (cmd.equals("rename_principal")
-            || cmd.equals("renprinc")) {
-            executor = new HasRemoteRenamePrincipalCmd(hadmin, hasAuthAdminClient);
-        } else if (cmd.equals("list_principals")
-            || cmd.equals("listprincs")) {
-            executor = new HasRemoteGetPrincipalsCmd(hadmin, hasAuthAdminClient);
-        } else if (cmd.equals("get_hostroles")
-            || cmd.equals("hostroles")) {
-            executor = new HasRemoteGetHostRolesCmd(hadmin, hasAuthAdminClient);
-        } else if (cmd.equals("create_principals")
-            || cmd.equals("creprincs")) {
-            executor = new HasRemoteCreatePrincipalsCmd(hadmin, hasAuthAdminClient);
-        } else if (cmd.equals("export_keytabs")
-            || cmd.equals("expkeytabs")) {
-            executor = new HasRemoteExportKeytabsCmd(hadmin, hasAuthAdminClient);
-        } else if (cmd.equals("enable_configure")
-            || cmd.equals("enable")) {
-            executor = new HasRemoteEnableConfCmd(hadmin, hasAuthAdminClient);
-        } else if (cmd.equals("disable_configure")
-            || cmd.equals("disable")) {
-            executor = new HasRemoteDisableConfCmd(hadmin, hasAuthAdminClient);
-        } else {
-            System.out.println(LEGAL_COMMANDS);
-            return;
-        }
-        executor.execute(items);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java
deleted file mode 100644
index 81f6d98..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.hadoop.has.client.HasAuthAdminClient;
-import org.apache.hadoop.has.common.HasException;
-
-public abstract class HadminRemoteCmd {
-
-    private HasAdminClient hadmin;
-    private HasAuthAdminClient authHadmin;
-
-    public HadminRemoteCmd(HasAdminClient hadmin, HasAuthAdminClient authHadminClient) {
-        this.hadmin = hadmin;
-        this.authHadmin = authHadminClient;
-    }
-
-    protected HasAdminClient getHadmin() {
-        return hadmin;
-    }
-
-    protected HasAuthAdminClient getAuthHadmin() {
-        return authHadmin;
-    }
-
-    /**
-     * Execute the hadmin cmd.
-     * @param input Input cmd to execute
-     */
-    public abstract void execute(String[] input) throws HasException;
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java
deleted file mode 100644
index 39a24d0..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.hadoop.has.client.HasAuthAdminClient;
-import org.apache.hadoop.has.common.HasException;
-
-/**
- * Remote add principal cmd
- */
-public class HasRemoteAddPrincipalCmd extends HadminRemoteCmd {
-
-    public static final String USAGE = "Usage: add_principal [options] <principal-name>\n"
-        + "\toptions are:\n"
-        + "\t\t[-randkey]\n"
-        + "\t\t[-pw password]"
-        + "\tExample:\n"
-        + "\t\tadd_principal -pw mypassword alice\n";
-
-    public HasRemoteAddPrincipalCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
-        super(hadmin, authHadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        if (items.length < 2) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        String clientPrincipal = items[items.length - 1];
-
-        HasAdminClient hasAdminClient;
-        if (getAuthHadmin() != null) {
-            hasAdminClient = getAuthHadmin();
-        } else {
-            hasAdminClient = getHadmin();
-        }
-
-        if (!items[1].startsWith("-")) {
-            hasAdminClient.addPrincipal(clientPrincipal);
-        } else if (items[1].startsWith("-randkey")) {
-            hasAdminClient.addPrincipal(clientPrincipal);
-        } else if (items[1].startsWith("-pw")) {
-            String password = items[2];
-            hasAdminClient.addPrincipal(clientPrincipal, password);
-        } else {
-            System.err.println("add_principal cmd format error.");
-            System.err.println(USAGE);
-        }
-    }
-}
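For context, the add_principal command above maps onto the two HasAdminClient calls visible in the deleted code: addPrincipal(principal) for the -randkey and no-option forms, and addPrincipal(principal, password) for -pw. Below is a minimal sketch of making the same calls directly, assuming a hadmin.conf like the one the remote tool loads; the class name, conf path, and principal names are placeholders, not part of this commit.

    import org.apache.hadoop.has.client.HasAdminClient;
    import org.apache.hadoop.has.common.HasConfig;
    import org.apache.hadoop.has.common.util.HasUtil;

    import java.io.File;

    public class AddPrincipalSketch {
        public static void main(String[] args) throws Exception {
            // Load hadmin.conf from a conf directory, as HadminRemoteTool does.
            HasConfig hasConfig = HasUtil.getHasConfig(new File("conf", "hadmin.conf"));
            HasAdminClient hadmin = new HasAdminClient(hasConfig);

            // Equivalent of "add_principal -randkey alice" (and the no-option form) above.
            hadmin.addPrincipal("alice");

            // Equivalent of "add_principal -pw mypassword bob": explicit password.
            hadmin.addPrincipal("bob", "mypassword");
        }
    }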

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java
deleted file mode 100644
index aa79e23..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.hadoop.has.client.HasAuthAdminClient;
-import org.apache.hadoop.has.common.HasException;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-
-public class HasRemoteCreatePrincipalsCmd extends HadminRemoteCmd {
-    private static final String USAGE = "\nUsage: create_principals [hostRoles-file]\n"
-            + "\t'hostRoles-file' is a file with a hostRoles json string like:\n"
-            + "\t\t{HOSTS: [ {\"name\":\"host1\",\"hostRoles\":\"HDFS\"}, "
-            + "{\"name\":\"host2\",\"hostRoles\":\"HDFS,HBASE\"} ] }\n"
-            + "\tExample:\n"
-            + "\t\tcreate_principals hostroles.txt\n";
-
-    public HasRemoteCreatePrincipalsCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
-        super(hadmin, authHadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        //String param = items[0];
-        if (items.length != 2) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        File hostRoles = new File(items[1]);
-        if (!hostRoles.exists()) {
-            System.err.println("HostRoles file is not exists.");
-            return;
-        }
-
-        HasAdminClient hasAdminClient;
-        if (getAuthHadmin() != null) {
-            hasAdminClient = getAuthHadmin();
-        } else {
-            hasAdminClient = getHadmin();
-        }
-
-        BufferedReader reader;
-        try {
-            reader = new BufferedReader(new FileReader(hostRoles));
-        } catch (FileNotFoundException e) {
-            throw new HasException("File not exist", e);
-        }
-        StringBuilder sb = new StringBuilder();
-        String tempString;
-        try {
-            while ((tempString = reader.readLine()) != null) {
-                sb.append(tempString);
-            }
-        } catch (IOException e) {
-            throw new HasException("Errors occurred when read line. ", e);
-        }
-        hasAdminClient.requestCreatePrincipals(sb.toString());
-    }
-}
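Similarly, create_principals only reads the hostRoles JSON file named on the command line and hands the resulting string to HasAdminClient.requestCreatePrincipals. A compact sketch of that step, reusing a client configured as in the previous sketch; the class, method, and file names here are illustrative only.

    import org.apache.hadoop.has.client.HasAdminClient;
    import org.apache.hadoop.has.common.HasException;

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class CreatePrincipalsSketch {
        // hadmin: a HasAdminClient built from hadmin.conf, as in the previous sketch.
        static void createFromFile(HasAdminClient hadmin, String hostRolesFile)
                throws HasException, IOException {
            // The file holds a hostRoles JSON string in the format shown in the USAGE text above.
            String hostRoles = new String(Files.readAllBytes(Paths.get(hostRolesFile)), "UTF-8");

            // Ask the HAS server to create principals for every host/role pair.
            hadmin.requestCreatePrincipals(hostRoles);
        }
    }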

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java
deleted file mode 100644
index 260ff2c..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.hadoop.has.client.HasAuthAdminClient;
-import org.apache.hadoop.has.common.HasException;
-
-import java.io.Console;
-import java.util.Scanner;
-
-/**
- * Remote delete principal cmd
- */
-public class HasRemoteDeletePrincipalCmd extends HadminRemoteCmd {
-
-    public static final String USAGE = "Usage: delete_principal <principal-name>\n"
-        + "\tExample:\n"
-        + "\t\tdelete_principal alice\n";
-
-    public HasRemoteDeletePrincipalCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
-        super(hadmin, authHadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        if (items.length < 2) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        HasAdminClient hasAdminClient;
-        if (getAuthHadmin() != null) {
-            hasAdminClient = getAuthHadmin();
-        } else {
-            hasAdminClient = getHadmin();
-        }
-
-        String principal = items[items.length - 1];
-        String reply;
-        Console console = System.console();
-        String prompt = "Are you sure to delete the principal? (yes/no, YES/NO, y/n, Y/N) ";
-        if (console == null) {
-            System.out.println("Couldn't get Console instance, "
-                + "maybe you're running this from within an IDE. "
-                + "Use scanner to read password.");
-            Scanner scanner = new Scanner(System.in, "UTF-8");
-            reply = getReply(scanner, prompt);
-        } else {
-            reply = getReply(console, prompt);
-        }
-        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
-            hasAdminClient.deletePrincipal(principal);
-        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
-            System.out.println("Principal \"" + principal + "\"  not deleted.");
-        } else {
-            System.err.println("Unknown request, fail to delete the principal.");
-            System.err.println(USAGE);
-        }
-    }
-
-    private String getReply(Scanner scanner, String prompt) {
-        System.out.println(prompt);
-        return scanner.nextLine().trim();
-    }
-
-    private String getReply(Console console, String prompt) {
-        console.printf(prompt);
-        String line = console.readLine();
-        return line;
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java
deleted file mode 100644
index 30027b3..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.hadoop.has.client.HasAuthAdminClient;
-import org.apache.hadoop.has.common.HasException;
-
-/**
- * Remote disable configure cmd
- */
-public class HasRemoteDisableConfCmd extends HadminRemoteCmd {
-
-    public static final String USAGE = "Usage: disable_configure\n"
-            + "\tExample:\n"
-            + "\t\tdisable\n";
-
-    public HasRemoteDisableConfCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
-        super(hadmin, authHadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        HasAdminClient hasAdminClient;
-        if (getAuthHadmin() != null) {
-            hasAdminClient = getAuthHadmin();
-        } else {
-            hasAdminClient = getHadmin();
-        }
-        hasAdminClient.setEnableOfConf("false");
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java
deleted file mode 100644
index 852d487..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.hadoop.has.client.HasAuthAdminClient;
-import org.apache.hadoop.has.common.HasException;
-
-/**
- * Remote enable configure cmd
- */
-public class HasRemoteEnableConfCmd extends HadminRemoteCmd {
-
-    public static final String USAGE = "Usage: enable_configure\n"
-            + "\tExample:\n"
-            + "\t\tenable\n";
-
-    public HasRemoteEnableConfCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
-        super(hadmin, authHadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        HasAdminClient hasAdminClient;
-        if (getAuthHadmin() != null) {
-            hasAdminClient = getAuthHadmin();
-        } else {
-            hasAdminClient = getHadmin();
-        }
-        hasAdminClient.setEnableOfConf("true");
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java
deleted file mode 100644
index ead3b28..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.hadoop.has.client.HasAuthAdminClient;
-import org.apache.hadoop.has.common.HasException;
-
-public class HasRemoteExportKeytabsCmd extends HadminRemoteCmd {
-    private static final String USAGE = "\nUsage: export_keytabs <host> [role]\n"
-            + "\tExample:\n"
-            + "\t\texport_keytabs host1 HDFS\n";
-
-    public HasRemoteExportKeytabsCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
-        super(hadmin, authHadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        //TODO add save path option
-        //String param = items[0];
-        if (items.length < 2) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        HasAdminClient hasAdminClient;
-        if (getAuthHadmin() != null) {
-            hasAdminClient = getAuthHadmin();
-        } else {
-            hasAdminClient = getHadmin();
-        }
-
-        String host = items[1];
-        String role = "";
-        if (items.length >= 3) {
-            role = items[2];
-        }
-        hasAdminClient.getKeytabByHostAndRole(host, role);
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java
deleted file mode 100644
index 70b9ea7..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.hadoop.has.client.HasAuthAdminClient;
-import org.apache.hadoop.has.common.HasException;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-
-public class HasRemoteGetHostRolesCmd extends HadminRemoteCmd {
-    private static final String USAGE = "Usage: get_hostroles\n"
-            + "\tExample:\n"
-            + "\t\tget_hostroles\n";
-
-    public HasRemoteGetHostRolesCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
-        super(hadmin, authHadmin);
-    }
-
-    @Override
-    public void execute(String[] input) throws HasException {
-        HasAdminClient hasAdminClient = getHadmin();
-        String result = hasAdminClient.getHostRoles();
-
-        if (result != null) {
-            try {
-                JSONArray hostRoles = new JSONArray(result);
-                for (int i = 0; i < hostRoles.length(); i++) {
-                    JSONObject hostRole = hostRoles.getJSONObject(i);
-                    System.out.print("\tHostRole: " + hostRole.getString("HostRole")
-                            + ", PrincipalNames: ");
-                    JSONArray principalNames = hostRole.getJSONArray("PrincipalNames");
-                    for (int j = 0; j < principalNames.length(); j++) {
-                        System.out.print(principalNames.getString(j));
-                        if (j == principalNames.length() - 1) {
-                            System.out.println();
-                        } else {
-                            System.out.print(", ");
-                        }
-                    }
-                }
-            } catch (JSONException e) {
-                throw new HasException("Errors occurred when getting the host roles.", e);
-            }
-        } else {
-            throw new HasException("Could not get hostRoles.");
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java
deleted file mode 100644
index 05d6970..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.hadoop.has.client.HasAuthAdminClient;
-import org.apache.hadoop.has.common.HasException;
-
-import java.util.List;
-
-public class HasRemoteGetPrincipalsCmd extends HadminRemoteCmd {
-    private static final String USAGE = "Usage: list_principals [expression]\n"
-            + "\t'expression' is a shell-style glob expression that can contain the wild-card characters ?, *, and []."
-            + "\tExample:\n"
-            + "\t\tlist_principals [expression]\n";
-
-    public HasRemoteGetPrincipalsCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
-        super(hadmin, authHadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        if (items.length > 2) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        HasAdminClient hasAdminClient;
-        if (getAuthHadmin() != null) {
-            hasAdminClient = getAuthHadmin();
-        } else {
-            hasAdminClient = getHadmin();
-        }
-
-        List<String> principalLists = null;
-
-        if (items.length == 1) {
-            try {
-                principalLists = hasAdminClient.getPrincipals();
-            } catch (Exception e) {
-                System.err.println("Errors occurred when getting the principals. " + e.getMessage());
-            }
-        } else {
-            //have expression
-            String exp = items[1];
-            principalLists = hasAdminClient.getPrincipals(exp);
-        }
-
-        if (principalLists.size() == 0 || principalLists.size() == 1 && principalLists.get(0).isEmpty()) {
-            return;
-        } else {
-            System.out.println("Principals are listed:");
-            for (int i = 0; i < principalLists.size(); i++) {
-                System.out.println(principalLists.get(i));
-            }
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java
deleted file mode 100644
index f900f3a..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.hadoop.has.client.HasAuthAdminClient;
-import org.apache.hadoop.has.common.HasException;
-
-import java.io.Console;
-import java.util.Scanner;
-
-/**
- * Remote rename principal cmd
- */
-public class HasRemoteRenamePrincipalCmd extends HadminRemoteCmd {
-    public static final String USAGE = "Usage: rename_principal <old_principal_name>"
-        + " <new_principal_name>\n"
-        + "\tExample:\n"
-        + "\t\trename_principal alice bob\n";
-
-    public HasRemoteRenamePrincipalCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
-        super(hadmin, authHadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws HasException {
-        if (items.length < 3) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        HasAdminClient hasAdminClient;
-        if (getAuthHadmin() != null) {
-            hasAdminClient = getAuthHadmin();
-        } else {
-            hasAdminClient = getHadmin();
-        }
-
-        String oldPrincipalName = items[items.length - 2];
-        String newPrincipalName = items[items.length - 1];
-
-        String reply;
-        Console console = System.console();
-        String prompt = "Are you sure to rename the principal? (yes/no, YES/NO, y/n, Y/N) ";
-        if (console == null) {
-            System.out.println("Couldn't get Console instance, "
-                + "maybe you're running this from within an IDE. "
-                + "Use scanner to read password.");
-            Scanner scanner = new Scanner(System.in, "UTF-8");
-            reply = getReply(scanner, prompt);
-        } else {
-            reply = getReply(console, prompt);
-        }
-        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
-            hasAdminClient.renamePrincipal(oldPrincipalName, newPrincipalName);
-        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
-            System.out.println("Principal \"" + oldPrincipalName + "\"  not renamed.");
-        } else {
-            System.err.println("Unknown request, fail to rename the principal.");
-            System.err.println(USAGE);
-        }
-    }
-
-    private String getReply(Scanner scanner, String prompt) {
-        System.out.println(prompt);
-        return scanner.nextLine().trim();
-    }
-
-    private String getReply(Console console, String prompt) {
-        console.printf(prompt);
-        String line = console.readLine();
-        return line;
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hclient/HasClientLoginTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hclient/HasClientLoginTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hclient/HasClientLoginTool.java
deleted file mode 100644
index 906b6fb..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hclient/HasClientLoginTool.java
+++ /dev/null
@@ -1,269 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.hclient;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.hadoop.has.client.HasAuthAdminClient;
-import org.apache.hadoop.has.client.HasClient;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasJaasLoginUtil;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.server.KdcConfig;
-import org.apache.kerby.kerberos.kerb.server.KdcUtil;
-import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;
-import org.apache.kerby.util.OSUtil;
-
-import javax.security.auth.Subject;
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-
-public class HasClientLoginTool {
-    private static List<String> principalList = new ArrayList<String>();
-    private static List<File>  keytabList = new ArrayList<File>();
-
-    private static final String KEYTAB_USAGE = (OSUtil.isWindows()
-        ? "Usage: bin\\k=login-test.cmd" : "Usage: sh bin/login-test.sh")
-        + " [add|run|delete] [conf_dir] [work_dir] [number]\n"
-        + "\n";
-
-    private static final String TGT_USAGE = (OSUtil.isWindows()
-        ? "Usage: bin\\k=login-test.cmd" : "Usage: sh bin/login-test.sh")
-        + " tgt [conf_dir]\n"
-        + "\n";
-
-    private static void printKeytabUsage(String error) {
-        System.err.println(error + "\n");
-        System.err.println(KEYTAB_USAGE);
-        System.exit(-1);
-    }
-
-    private static void printTgtUsage(String error) {
-        System.err.println(error + "\n");
-        System.err.println(TGT_USAGE);
-        System.exit(-1);
-    }
-
-    public static class Task implements Runnable {
-        private int index;
-
-        Task(int index) {
-            this.index = index;
-        }
-
-        @Override
-        public void run() {
-            Subject subject = null;
-            try {
-                subject = HasJaasLoginUtil.loginUsingKeytab(principalList.get(index),
-                    keytabList.get(index));
-            } catch (IOException e) {
-                System.err.println("Fail to login using keytab. " + e);
-            }
-            System.out.println("Login succeeded for user: "
-                + subject.getPrincipals().iterator().next());
-        }
-    }
-
-    public static void main(String[] args) {
-
-        String cmd = args[0];
-        File confDir;
-        File workDir;
-
-        if (cmd.equals("tgt")) {
-            if (args.length != 2) {
-                printTgtUsage("Need 2 args.");
-                return;
-            }
-
-            confDir = new File(args[1]);
-            if (!confDir.exists()) {
-                printTgtUsage("Need the valid conf dir.");
-                return;
-            }
-            File confFile = new File(confDir, "hadmin.conf");
-            HasConfig hasConfig;
-            try {
-                hasConfig = HasUtil.getHasConfig(confFile);
-            } catch (HasException e) {
-                System.err.println(e.getMessage());
-                return;
-            }
-            if (hasConfig == null) {
-                System.err.println("hadmin.conf not exist in " + confDir.getAbsolutePath());
-                return;
-            }
-            String host = hasConfig.getHttpsHost();
-            String port = hasConfig.getHttpsPort();
-
-            HasClient hasClient = new HasClient();
-            TgtTicket tgtTicket;
-            try {
-                tgtTicket = hasClient.requestTgt();
-            } catch (HasException e) {
-                System.err.println("Errors occurred when getting TGT. " + e.getMessage());
-                return;
-            }
-
-            System.out.println("Get the tgt ticket successfully!");
-            System.out.println("The client principal of tgt ticket: " + tgtTicket.getClientPrincipal());
-
-            Subject subject = null;
-            try {
-                subject = HasJaasLoginUtil.loginUserFromTgtTicket(
-                    "https://" + host + ":" + port + "/has/v1?auth_type=RAM");
-            } catch (IOException e) {
-                System.err.println("Errors occurred when login user with TGT. " + e.getMessage());
-                return;
-            }
-
-            System.out.println("Principal: " + subject.getPrincipals().iterator().next());
-        } else {
-            if (args.length != 4) {
-                printKeytabUsage("Need 4 args.");
-                return;
-            }
-
-            confDir = new File(args[1]);
-            workDir = new File(args[2]);
-
-            if (!confDir.exists()) {
-                printKeytabUsage("Need the valid conf dir.");
-                return;
-            }
-            if (!workDir.exists()) {
-                printKeytabUsage("Need the valid work dir.");
-                return;
-            }
-
-            int taskNum = Integer.parseInt(args[3]);
-
-            System.out.println("The task num is: " + taskNum);
-
-            if (taskNum <= 0) {
-                printKeytabUsage("The task num must be greater than zero");
-                System.exit(-1);
-            }
-
-            HasAdminClient hasAdminClient;
-            HasAuthAdminClient authHasAdminClient = null;
-            File confFile = new File(confDir, "hadmin.conf");
-            HasConfig hasConfig = null;
-            try {
-                hasConfig = HasUtil.getHasConfig(confFile);
-            } catch (HasException e) {
-                System.err.println(e.getMessage());
-                return;
-            }
-
-            if (hasConfig == null) {
-                System.err.println("hadmin.conf not exist in " + confDir.getAbsolutePath());
-                return;
-            }
-
-            if (hasConfig.getFilterAuthType().equals("kerberos")) {
-                authHasAdminClient = new HasAuthAdminClient(hasConfig);
-            }
-            if (authHasAdminClient != null) {
-                hasAdminClient = authHasAdminClient;
-            } else {
-                hasAdminClient = new HasAdminClient(hasConfig);
-            }
-            String realm = null;
-            try {
-                KdcConfig kdcConfig = KdcUtil.getKdcConfig(confDir);
-                realm = kdcConfig.getKdcRealm();
-            } catch (KrbException e) {
-                printKeytabUsage(e.getMessage());
-            }
-
-            if (cmd.equals("add")) {
-                for (int i = 0; i < taskNum; i++) {
-                    String principal = "test" + i + "@" + realm;
-                    try {
-                        hasAdminClient.addPrincipal(principal);
-                    } catch (HasException e) {
-                        System.err.println("Errors occurred when adding principal. "
-                            + e.getMessage());
-                        return;
-                    }
-                    File keytabFile = new File(workDir, i + ".keytab");
-                    try {
-                        hasAdminClient.exportKeytab(keytabFile, principal);
-                    } catch (HasException e) {
-                        System.err.println("Errors occurred when exporting the keytabs. "
-                            + e.getMessage());
-                        return;
-                    }
-                    System.out.println("Add principals and keytabs successfully.");
-                }
-            } else if (cmd.equals("run")) {
-                ExecutorService exec;
-                for (int i = 0; i < taskNum; i++) {
-                    String principal = "test" + i + "@" + realm;
-                    principalList.add(i, principal);
-                    File file = new File(workDir, i + ".keytab");
-                    keytabList.add(i, file);
-                }
-                System.out.println("Start the login test.");
-                Long startTime = System.currentTimeMillis();
-                exec = Executors.newFixedThreadPool(5);
-                for (int i = 0; i < taskNum; ++i) {
-                    exec.submit(new Task(i));
-                }
-                exec.shutdown();
-                try {
-                    exec.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
-                } catch (InterruptedException e) {
-                    System.err.println(e.getMessage());
-                    return;
-                }
-                Long endTime = System.currentTimeMillis();
-                System.out.println("Finish the login test.");
-                System.out.println("Cost time: " + (endTime - startTime) + "ms");
-            } else if (cmd.equals("delete")) {
-                for (int i = 0; i < taskNum; i++) {
-                    String principal = "test" + i + "@" + realm;
-                    try {
-                        hasAdminClient.deletePrincipal(principal);
-                    } catch (HasException e) {
-                        System.err.println("Errors occurred when deleting the principal. "
-                            + e.getMessage());
-                        continue;
-                    }
-                    File file = new File(workDir, i + ".keytab");
-                    if (!file.delete()) {
-                        System.err.println("Failed to delete " + i + ".keytab.");
-                    }
-                }
-                System.out.println("Delete principals and keytabs successfully.");
-            } else {
-                printKeytabUsage("Need the cmd with add, run or delete.");
-            }
-        }
-    }
-}
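The keytab branch of the login test above boils down to one call per principal: HasJaasLoginUtil.loginUsingKeytab(principal, keytabFile). A single-principal sketch of that path, assuming the principal and keytab were produced by the tool's add step; the realm, paths, and class name are placeholders.

    import org.apache.hadoop.has.common.util.HasJaasLoginUtil;

    import javax.security.auth.Subject;
    import java.io.File;

    public class KeytabLoginSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder principal and keytab, following the "test<i>@<realm>" / "<i>.keytab"
            // naming used by the add step of the tool above.
            String principal = "test0@EXAMPLE.COM";
            File keytab = new File("work", "0.keytab");

            // Log in with the keytab, as each Task in the login test does.
            Subject subject = HasJaasLoginUtil.loginUsingKeytab(principal, keytab);
            System.out.println("Login succeeded for user: "
                + subject.getPrincipals().iterator().next());
        }
    }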

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/HasInitTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/HasInitTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/HasInitTool.java
deleted file mode 100644
index 1f46305..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/HasInitTool.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.kdcinit;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.tool.client.kdcinit.cmd.*;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.util.OSUtil;
-
-import java.io.File;
-import java.util.Scanner;
-
-public class HasInitTool {
-    private static final String PROMPT = HasInitTool.class.getSimpleName();
-    private static final String USAGE = (OSUtil.isWindows()
-            ? "Usage: bin\\hadmin.cmd" : "Usage: sh bin/kdcinit.sh")
-            + " <conf-file>\n"
-            + "\tExample:\n"
-            + "\t\t"
-            + (OSUtil.isWindows()
-            ? "bin\\kdcinit.cmd" : "sh bin/kdcinit.sh")
-            + " conf\n";
-
-    private static final String LEGAL_COMMANDS = "Available commands are: "
-            + "\n"
-            + "get_krb5conf, getkrb5\n"
-            + "                         Get krb5.conf\n"
-            + "get_hasConf, gethas\n"
-            + "                         Get has-client.conf\n"
-            + "set_plugin, setplugin\n"
-            + "                         Set plugin\n"
-            + "config_kdcBackend, confbackend\n"
-            + "                         Config kdc backend\n"
-            + "config_kdc, confkdc\n"
-            + "                         Config kdc\n"
-            + "start_kdc, start\n"
-            + "                         Start kdc\n"
-            + "init_kdc, init\n"
-            + "                         Init kdc\n";
-
-    public static void main(String[] args) {
-        if (args.length < 1) {
-            System.err.println(USAGE);
-            System.exit(1);
-        }
-        String confDirPath = args[0];
-        File confFile = new File(confDirPath, "hadmin.conf");
-        HasConfig hasConfig;
-        try {
-            hasConfig = HasUtil.getHasConfig(confFile);
-        } catch (HasException e) {
-            System.err.println(e.getMessage());
-            return;
-        }
-
-        System.out.println(LEGAL_COMMANDS);
-        System.out.println("enter \"<cmd> [?][-help]\" to get cmd help.");
-        Scanner scanner = new Scanner(System.in, "UTF-8");
-        System.out.print(PROMPT + ": ");
-        String input = scanner.nextLine();
-
-        HasAdminClient hadmin = new HasAdminClient(hasConfig, new File(confDirPath));
-        while (!(input.equals("quit") || input.equals("exit") || input.equals("q"))) {
-            try {
-                execute(hadmin, input);
-            } catch (KrbException e) {
-                System.err.println(e.getMessage());
-            }
-            System.out.print(PROMPT + ": ");
-            input = scanner.nextLine();
-        }
-    }
-
-    private static void execute(HasAdminClient hadmin, String input) throws KrbException {
-        input = input.trim();
-        if (input.startsWith("cmd")) {
-            System.out.println(LEGAL_COMMANDS);
-            return;
-        }
-        String[] items = input.split("\\s+");
-        String cmd = items[0];
-
-        KdcInitCmd executor;
-        if (cmd.equals("get_krb5conf")
-                || cmd.equals("getkrb5")) {
-            executor = new HasGetKrb5confCmd(hadmin);
-        } else if (cmd.equals("get_hasConf")
-                || cmd.equals("gethas")) {
-            executor = new HasGetHasconfCmd(hadmin);
-        } else if (cmd.equals("set_plugin")
-                || cmd.equals("setplugin")) {
-            executor = new HasSetPluginCmd(hadmin);
-        } else if (cmd.equals("config_kdcBackend")
-                || cmd.equals("confbackend")) {
-            executor = new HasConfKdcBackendCmd(hadmin);
-        } else if (cmd.equals("config_kdc")
-                || cmd.equals("confkdc")) {
-            executor = new HasConfKdcCmd(hadmin);
-        } else if (cmd.equals("start_kdc")
-                || cmd.equals("start")) {
-            executor = new HasStartKdcCmd(hadmin);
-        } else if (cmd.equals("init_kdc")
-                || cmd.equals("init")) {
-            executor = new HasInitKdcCmd(hadmin);
-        } else {
-            System.out.println(LEGAL_COMMANDS);
-            return;
-        }
-        executor.execute(items);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcBackendCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcBackendCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcBackendCmd.java
deleted file mode 100644
index 002e936..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcBackendCmd.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.kdcinit.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.kerby.kerberos.kerb.KrbException;
-
-/**
- * Remote config kdc cmd
- */
-public class HasConfKdcBackendCmd extends KdcInitCmd {
-
-    public static final String USAGE = "Usage: config_kdcBackend <backendType> [dir] [url] [user]"
-        + " [password]\n"
-        + "\tSupported backendType : json,mysql\n"
-        + "\tExample:\n"
-        + "\t\tconfig_kdcBackend json /tmp/has/jsonbackend \n"
-        + "\t\tconfig_kdcBackend mysql jdbc:mysql://127.0.0.1:3306/mysqlbackend root passwd\n";
-
-    public HasConfKdcBackendCmd(HasAdminClient hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws KrbException {
-        if (items.length >= 2) {
-            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
-                System.out.println(USAGE);
-                return;
-            }
-        }
-        if (items.length < 3) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        HasAdminClient hasAdminClient = getHadmin();
-        if (items.length >= 3 && items[1].equals("json")) {
-            hasAdminClient.configKdcBackend(items[1], items[2],
-                    null, null, null);
-        } else if (items.length >= 5 && items[1].equals("mysql")) {
-            hasAdminClient.configKdcBackend(items[1], null,
-                    items[2], items[3], items[4]);
-        } else {
-            System.err.println(USAGE);
-            return;
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcCmd.java
deleted file mode 100644
index fdd3e92..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcCmd.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.kdcinit.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.kerby.kerberos.kerb.KrbException;
-
-/**
- * Remote config kdc cmd
- */
-public class HasConfKdcCmd extends KdcInitCmd {
-
-    public static final String USAGE = "Usage: config_kdc <host> <port> <realm>\n"
-        + "\tExample:\n"
-        + "\t\tconfig_kdc localhost 88 HADOOP.COM\n";
-
-    public HasConfKdcCmd(HasAdminClient hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws KrbException {
-        if (items.length >= 2) {
-            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
-                System.out.println(USAGE);
-                return;
-            }
-        }
-        if (items.length < 4) {
-            System.err.println(USAGE);
-            return;
-        }
-
-        HasAdminClient hasAdminClient = getHadmin();
-        hasAdminClient.configKdc(items[2], items[3], items[1]);
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetHasconfCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetHasconfCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetHasconfCmd.java
deleted file mode 100644
index 3011cb4..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetHasconfCmd.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.kdcinit.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.kerby.kerberos.kerb.KrbException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.PrintStream;
-
-/**
- * Remote get has-client.conf cmd
- */
-public class HasGetHasconfCmd extends KdcInitCmd {
-
-    public static final String USAGE = "Usage: get_hasConf [-p] [path]\n"
-        + "\tExample:\n"
-        + "\t\tget_hasConf\n";
-
-    public HasGetHasconfCmd(HasAdminClient hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws KrbException {
-        if (items.length >= 2) {
-            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
-                System.out.println(USAGE);
-                return;
-            }
-        }
-        File path = getHadmin().getConfDir();
-        if (items.length >= 3 && items[1].startsWith("-p")) {
-            path = new File(items[2]);
-            if (!path.exists()) {
-                if (!path.mkdirs()) {
-                    System.err.println("Cannot create file : " + items[2]);
-                    return;
-                }
-            }
-        }
-        File hasConf = new File(path, "has-client.conf");
-
-        HasAdminClient hasAdminClient = getHadmin();
-        String content = hasAdminClient.getHasconf();
-        if (content == null) {
-            System.err.println("Failed to get has.conf.");
-            return;
-        }
-        try {
-            PrintStream ps = new PrintStream(new FileOutputStream(hasConf));
-            ps.println(content);
-            System.out.println("has-client.conf has saved in : " + hasConf.getAbsolutePath());
-        } catch (FileNotFoundException e) {
-            System.err.println(e.getMessage());
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetKrb5confCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetKrb5confCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetKrb5confCmd.java
deleted file mode 100644
index 4b39ac8..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetKrb5confCmd.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.kdcinit.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.kerby.kerberos.kerb.KrbException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.PrintStream;
-
-/**
- * Remote get krb5.conf cmd
- */
-public class HasGetKrb5confCmd extends KdcInitCmd {
-
-    public static final String USAGE = "Usage: get_krb5conf [-p] [path]\n"
-        + "\tExample:\n"
-        + "\t\tget_krb5conf -p /tmp/has\n";
-
-    public HasGetKrb5confCmd(HasAdminClient hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws KrbException {
-        if (items.length >= 2) {
-            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
-                System.out.println(USAGE);
-                return;
-            }
-        }
-        File path = getHadmin().getConfDir();
-        if (items.length >= 3 && items[1].startsWith("-p")) {
-            path = new File(items[2]);
-            if (!path.exists()) {
-                if (!path.mkdirs()) {
-                    System.err.println("Cannot create file : " + items[2]);
-                    return;
-                }
-            }
-        }
-        File krb5Conf = new File(path, "krb5.conf");
-
-        HasAdminClient hasAdminClient = getHadmin();
-        String content = hasAdminClient.getKrb5conf();
-        if (content == null) {
-            System.err.println("Failed to get krb5.conf.");
-            return;
-        }
-        try {
-            PrintStream ps = new PrintStream(new FileOutputStream(krb5Conf));
-            ps.println(content);
-            System.out.println("krb5.conf has saved in : " + krb5Conf.getAbsolutePath());
-        } catch (FileNotFoundException e) {
-            System.err.println(e.getMessage());
-        }
-    }
-}


[06/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/WebConfigKey.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/WebConfigKey.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/WebConfigKey.java
new file mode 100644
index 0000000..ff31229
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/WebConfigKey.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kerby.has.server.web;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+
+/** 
+ * This class contains constants for configuration keys and default values
+ * used by the HAS web server.
+ */
+@InterfaceAudience.Private
+public class WebConfigKey {
+
+  public static final int HAS_HTTP_PORT_DEFAULT = 9870;
+  public static final String HAS_HTTP_HOST_DEFAULT = "0.0.0.0";
+  public static final String HAS_HTTP_ADDRESS_KEY = "has.http-address";
+  public static final String HAS_HTTP_ADDRESS_DEFAULT = HAS_HTTP_HOST_DEFAULT + ":" + HAS_HTTP_PORT_DEFAULT;
+
+  public static final String HAS_HTTPS_BIND_HOST_KEY = "has.https-bind-host";
+  public static final int HAS_HTTPS_PORT_DEFAULT = 9871;
+  public static final String HAS_HTTPS_HOST_DEFAULT = "0.0.0.0";
+  public static final String HAS_HTTPS_ADDRESS_KEY = "has.https-address";
+  public static final String HAS_HTTPS_ADDRESS_DEFAULT = HAS_HTTPS_HOST_DEFAULT + ":" + HAS_HTTPS_PORT_DEFAULT;
+  public static final String HAS_HTTP_POLICY_KEY = "has.http.policy";
+  public static final String HAS_HTTP_POLICY_DEFAULT = HttpConfig.Policy.HTTPS_ONLY.name();
+
+  public static final String HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY = "has.https.server.keystore.resource";
+  public static final String HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT = "ssl-server.xml";
+  public static final String HAS_SERVER_HTTPS_KEYPASSWORD_KEY = "ssl.server.keystore.keypassword";
+  public static final String HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY = "ssl.server.keystore.password";
+  public static final String HAS_SERVER_HTTPS_KEYSTORE_LOCATION_KEY = "ssl.server.keystore.location";
+  public static final String HAS_SERVER_HTTPS_TRUSTSTORE_LOCATION_KEY = "ssl.server.truststore.location";
+  public static final String HAS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY = "ssl.server.truststore.password";
+  public static final String HAS_CLIENT_HTTPS_NEED_AUTH_KEY = "has.client.https.need-auth";
+  public static final boolean HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT = false;
+
+  public static final String HAS_AUTHENTICATION_FILTER_KEY = "has.web.authentication.filter";
+  public static final String HAS_AUTHENTICATION_FILTER_DEFAULT = AuthenticationFilter.class.getName();
+
+  public static final String HAS_AUTHENTICATION_FILTER_AUTH_TYPE = "has.authentication.filter.auth.type";
+  public static final String HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY = "has.authentication.kerberos.principal";
+  public static final String HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY = "has.authentication.kerberos.keytab";
+  public static final String HAS_AUTHENTICATION_KERBEROS_NAME_RULES = "has.authentication.kerberos.name.rules";
+}
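
The keys above are read through HasConfig, with the compiled-in defaults as the
fallback. A minimal sketch of resolving them, assuming only the HasConfig
accessors already used by WebServer further down in this commit:

    HasConfig conf = new HasConfig();
    // Fall back to the defaults above when a key is not configured,
    // mirroring how WebServer.start() reads them.
    String httpAddress = conf.getString(
        WebConfigKey.HAS_HTTP_ADDRESS_KEY,
        WebConfigKey.HAS_HTTP_ADDRESS_DEFAULT);           // "0.0.0.0:9870"
    String policy = conf.getString(
        WebConfigKey.HAS_HTTP_POLICY_KEY,
        WebConfigKey.HAS_HTTP_POLICY_DEFAULT);            // "HTTPS_ONLY"
    boolean needClientAuth = conf.getBoolean(
        WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY,
        WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT); // false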

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/WebServer.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/WebServer.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/WebServer.java
new file mode 100644
index 0000000..cf8fd0a
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/WebServer.java
@@ -0,0 +1,348 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.kerby.has.server.web;
+
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.server.HasServer;
+import org.apache.kerby.has.server.web.rest.HasApi;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.ServletContext;
+import java.io.File;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.Map;
+
+public class WebServer {
+    public static final Logger LOG = LoggerFactory.getLogger(WebServer.class);
+
+    private HttpServer2 httpServer;
+    private final HasConfig conf;
+
+    private InetSocketAddress httpAddress;
+    private InetSocketAddress httpsAddress;
+
+    protected static final String HAS_SERVER_ATTRIBUTE_KEY = "hasserver";
+
+    public WebServer(HasConfig conf) {
+        this.conf = conf;
+    }
+
+    public HasConfig getConf() {
+        return conf;
+    }
+
+    private void init() {
+
+        final String pathSpec = "/has/v1/*";
+
+        // add has packages
+        httpServer.addJerseyResourcePackage(HasApi.class
+                .getPackage().getName(),
+            pathSpec);
+    }
+
+    public void defineFilter() {
+        String authType = conf.getString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE);
+        if ("kerberos".equals(authType)) {
+            // add authentication filter for the HAS admin endpoints
+            final String className = conf.getString(
+                WebConfigKey.HAS_AUTHENTICATION_FILTER_KEY,
+                WebConfigKey.HAS_AUTHENTICATION_FILTER_DEFAULT);
+
+            final String name = className;
+
+            Map<String, String> params = getAuthFilterParams(conf);
+
+            String adminPathSpec = "/has/v1/admin/*";
+            HttpServer2.defineFilter(httpServer.getWebAppContext(), name, className,
+                params, new String[]{adminPathSpec});
+            HttpServer2.LOG.info("Added filter '" + name + "' (class=" + className
+                + ")");
+        }
+    }
+
+    public void defineConfFilter() {
+        String confFilterName = ConfFilter.class.getName();
+        String confPath = "/has/v1/conf/*";
+        HttpServer2.defineFilter(httpServer.getWebAppContext(), confFilterName, confFilterName,
+                getAuthFilterParams(conf), new String[]{confPath});
+        HttpServer2.LOG.info("Added filter '" + confFilterName + "' (class=" + confFilterName
+                + ")");
+    }
+
+    private Map<String, String> getAuthFilterParams(HasConfig conf) {
+        Map<String, String> params = new HashMap<String, String>();
+
+        String authType = conf.getString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE);
+        if (authType != null && !authType.isEmpty()) {
+            params.put(AuthenticationFilter.AUTH_TYPE, authType);
+        }
+        String principal = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
+        if (principal != null && !principal.isEmpty()) {
+            try {
+                principal = SecurityUtil.getServerPrincipal(principal,
+                    getHttpsAddress().getHostName());
+            } catch (IOException e) {
+                LOG.warn("Errors occurred when get server principal. " + e.getMessage());
+            }
+            params.put(KerberosAuthenticationHandler.PRINCIPAL, principal);
+        }
+        String keytab = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
+        if (keytab != null && !keytab.isEmpty()) {
+            params.put(KerberosAuthenticationHandler.KEYTAB, keytab);
+        }
+        String rule = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_NAME_RULES);
+        if (rule != null && !rule.isEmpty()) {
+            params.put(KerberosAuthenticationHandler.NAME_RULES, rule);
+        } else {
+            params.put(KerberosAuthenticationHandler.NAME_RULES, "DEFAULT");
+        }
+        return params;
+    }
+
+    public InetSocketAddress getBindAddress() {
+        if (httpAddress != null) {
+            return httpAddress;
+        } else if (httpsAddress != null) {
+            return httpsAddress;
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Start the web server. Whether HTTP, HTTPS or both endpoints are
+     * created is decided by the configured HTTP policy.
+     */
+    public void start() throws HasException {
+
+        HttpConfig.Policy policy = getHttpPolicy(conf);
+
+        final String bindHost =
+            conf.getString(WebConfigKey.HAS_HTTPS_BIND_HOST_KEY);
+        InetSocketAddress httpAddr = null;
+        if (policy.isHttpEnabled()) {
+            final String httpAddrString = conf.getString(
+                WebConfigKey.HAS_HTTP_ADDRESS_KEY,
+                WebConfigKey.HAS_HTTP_ADDRESS_DEFAULT);
+            httpAddr = NetUtils.createSocketAddr(httpAddrString);
+            if (bindHost != null && !bindHost.isEmpty()) {
+                httpAddr = new InetSocketAddress(bindHost, httpAddr.getPort());
+            }
+            LOG.info("Get the http address: " + httpAddr);
+        }
+
+        InetSocketAddress httpsAddr = null;
+        if (policy.isHttpsEnabled()) {
+            final String httpsAddrString = conf.getString(
+                WebConfigKey.HAS_HTTPS_ADDRESS_KEY,
+                WebConfigKey.HAS_HTTPS_ADDRESS_DEFAULT);
+            httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
+
+            if (bindHost != null && !bindHost.isEmpty()) {
+                httpsAddr = new InetSocketAddress(bindHost, httpsAddr.getPort());
+            }
+            LOG.info("Get the https address: " + httpsAddr);
+        }
+
+        HttpServer2.Builder builder = httpServerTemplateForHAS(conf, httpAddr, httpsAddr, "has");
+
+        try {
+            httpServer = builder.build();
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when building http server. " + e.getMessage());
+        }
+
+        init();
+
+        try {
+            httpServer.start();
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when starting http server. " + e.getMessage());
+        }
+        int connIdx = 0;
+        if (policy.isHttpEnabled()) {
+            httpAddress = httpServer.getConnectorAddress(connIdx++);
+            conf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY,
+                NetUtils.getHostPortString(httpAddress));
+        }
+
+        if (policy.isHttpsEnabled()) {
+            httpsAddress = httpServer.getConnectorAddress(connIdx);
+            conf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY,
+                NetUtils.getHostPortString(httpsAddress));
+        }
+    }
+
+    public void setWebServerAttribute(HasServer hasServer) {
+        httpServer.setAttribute(HAS_SERVER_ATTRIBUTE_KEY, hasServer);
+    }
+
+    public static HasServer getHasServerFromContext(ServletContext context) {
+        return (HasServer) context.getAttribute(HAS_SERVER_ATTRIBUTE_KEY);
+    }
+
+    /**
+     * Get http policy.
+     */
+    public HttpConfig.Policy getHttpPolicy(HasConfig conf) {
+        String policyStr = conf.getString(WebConfigKey.HAS_HTTP_POLICY_KEY,
+            WebConfigKey.HAS_HTTP_POLICY_DEFAULT);
+        HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
+        if (policy == null) {
+            throw new HadoopIllegalArgumentException("Unregonized value '"
+                + policyStr + "' for " + WebConfigKey.HAS_HTTP_POLICY_KEY);
+        }
+
+        conf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY, policy.name());
+        return policy;
+    }
+
+    /**
+     * Return an HttpServer2.Builder that the HAS server can use to
+     * initialize its HTTP / HTTPS server.
+     */
+    public HttpServer2.Builder httpServerTemplateForHAS(
+        HasConfig conf, final InetSocketAddress httpAddr, final InetSocketAddress httpsAddr,
+        String name) throws HasException {
+        HttpConfig.Policy policy = getHttpPolicy(conf);
+
+        HttpServer2.Builder builder = new HttpServer2.Builder().setName(name);
+
+        if (policy.isHttpEnabled()) {
+            if (httpAddr.getPort() == 0) {
+                builder.setFindPort(true);
+            }
+
+            URI uri = URI.create("http://" + NetUtils.getHostPortString(httpAddr));
+            builder.addEndpoint(uri);
+            LOG.info("Starting Web-server for " + name + " at: " + uri);
+        }
+
+        if (policy.isHttpsEnabled() && httpsAddr != null) {
+            HasConfig sslConf = loadSslConfiguration(conf);
+            loadSslConfToHttpServerBuilder(builder, sslConf);
+
+            if (httpsAddr.getPort() == 0) {
+                builder.setFindPort(true);
+            }
+
+            URI uri = URI.create("https://" + NetUtils.getHostPortString(httpsAddr));
+            builder.addEndpoint(uri);
+            LOG.info("Starting Web-server for " + name + " at: " + uri);
+        }
+
+        return builder;
+    }
+
+    /**
+     * Load HTTPS-related configuration.
+     */
+    public HasConfig loadSslConfiguration(HasConfig conf) throws HasException {
+        HasConfig sslConf = new HasConfig();
+
+        String sslConfigString = conf.getString(
+            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT);
+        LOG.info("Get the ssl config file: " + sslConfigString);
+        try {
+            sslConf.addIniConfig(new File(sslConfigString));
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when adding config. " + e.getMessage());
+        }
+
+        final String[] reqSslProps = {
+            WebConfigKey.HAS_SERVER_HTTPS_TRUSTSTORE_LOCATION_KEY,
+            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_LOCATION_KEY,
+            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY,
+            WebConfigKey.HAS_SERVER_HTTPS_KEYPASSWORD_KEY
+        };
+
+        // Check if the required properties are included
+        for (String sslProp : reqSslProps) {
+            if (sslConf.getString(sslProp) == null) {
+                LOG.warn("SSL config " + sslProp + " is missing. If "
+                    + WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY
+                    + " is specified, make sure it is a relative path");
+            }
+        }
+
+        boolean requireClientAuth = conf.getBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY,
+            WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
+        sslConf.setBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY, requireClientAuth);
+        return sslConf;
+    }
+
+    public HttpServer2.Builder loadSslConfToHttpServerBuilder(HttpServer2.Builder builder,
+                                                              HasConfig sslConf) {
+        return builder
+            .needsClientAuth(
+                sslConf.getBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY,
+                    WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT))
+            .keyPassword(getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_KEYPASSWORD_KEY))
+            .keyStore(sslConf.getString("ssl.server.keystore.location"),
+                getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY),
+                sslConf.getString("ssl.server.keystore.type", "jks"))
+            .trustStore(sslConf.getString("ssl.server.truststore.location"),
+                getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY),
+                sslConf.getString("ssl.server.truststore.type", "jks"))
+            .excludeCiphers(
+                sslConf.getString("ssl.server.exclude.cipher.list"));
+    }
+
+    /**
+     * Get the password identified by the given alias. Currently this simply
+     * reads the clear-text value from the configuration; no CredentialProvider
+     * lookup is attempted.
+     *
+     * @param conf  Configuration instance
+     * @param alias name of the credential to retrieve
+     * @return String credential value or null
+     */
+    public String getPassword(HasConfig conf, String alias) {
+
+        return conf.getString(alias);
+    }
+
+    public void stop() throws Exception {
+        if (httpServer != null) {
+            httpServer.stop();
+        }
+    }
+
+    public InetSocketAddress getHttpAddress() {
+        return httpAddress;
+    }
+
+    public InetSocketAddress getHttpsAddress() {
+        return httpsAddress;
+    }
+}
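
A minimal sketch of bringing this WebServer up on its own, assuming an
HTTP-only policy and using only the HasConfig setters and WebServer methods
shown above (the HasServer attribute, authentication filters and SSL setup
are left out):

    public static void main(String[] args) throws Exception {
        HasConfig conf = new HasConfig();
        conf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY,
            HttpConfig.Policy.HTTP_ONLY.name());
        conf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY, "0.0.0.0:9870");

        WebServer webServer = new WebServer(conf);
        webServer.start();                 // throws HasException on failure
        System.out.println("HAS web server bound to " + webServer.getBindAddress());
        // ... serve requests ...
        webServer.stop();                  // declared to throw Exception
    }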

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/ConfApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/ConfApi.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/ConfApi.java
new file mode 100644
index 0000000..d113746
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/ConfApi.java
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server.web.rest;
+
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.util.HasUtil;
+import org.apache.kerby.has.server.HasServer;
+import org.apache.kerby.has.server.web.WebServer;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
+import org.apache.kerby.kerberos.kerb.server.KdcUtil;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * HAS configure web methods implementation.
+ */
+@Path("/conf")
+public class ConfApi {
+
+    @Context
+    private ServletContext context;
+
+    @Context
+    private HttpServletRequest httpRequest;
+
+    /**
+     * Set HAS plugin.
+     *
+     * @param plugin HAS plugin name
+     * @return Response
+     */
+    @PUT
+    @Path("/setplugin")
+    @Consumes({MediaType.TEXT_PLAIN})
+    @Produces({MediaType.TEXT_PLAIN})
+    public Response setPlugin(@QueryParam("plugin") final String plugin) {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            WebServer.LOG.info("Set HAS plugin...");
+            try {
+                Map<String, String> values = new HashMap<>();
+                File hasConfFile = new File(hasServer.getConfDir(), "has-server.conf");
+                HasConfig hasConfig = HasUtil.getHasConfig(hasConfFile);
+                if (hasConfig != null) {
+                    String defaultValue = hasConfig.getPluginName();
+                    values.put(defaultValue, plugin);
+                } else {
+                    throw new RuntimeException("has-server.conf not found. ");
+                }
+                hasServer.updateConfFile("has-server.conf", values);
+            } catch (IOException | HasException e) {
+                throw new RuntimeException("Failed to set HAS plugin. ", e);
+            }
+            WebServer.LOG.info("HAS plugin set successfully.");
+
+            return Response.status(200).entity("HAS plugin set successfully.\n").build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Config HAS server backend.
+     *
+     * @param backendType type of backend
+     * @param dir         json dir
+     * @param driver      mysql JDBC connector driver
+     * @param url         mysql JDBC connector url
+     * @param user        mysql user name
+     * @param password    mysql password of user
+     * @return Response
+     */
+    @PUT
+    @Path("/configkdcbackend")
+    @Consumes({MediaType.APPLICATION_JSON})
+    @Produces({MediaType.TEXT_PLAIN})
+    public Response configKdcBackend(
+        @QueryParam("backendType") final String backendType,
+        @QueryParam("dir") @DefaultValue("/tmp/has/jsonbackend") final String dir,
+        @QueryParam("driver") @DefaultValue("com.mysql.jdbc.Driver") final String driver,
+        @QueryParam("url") @DefaultValue("jdbc:mysql://127.0.0.1:3306/mysqlbackend") final String url,
+        @QueryParam("user") @DefaultValue("root") final String user,
+        @QueryParam("password") @DefaultValue("passwd") final String password) {
+
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            if ("json".equals(backendType)) {
+                WebServer.LOG.info("Set Json backend...");
+                try {
+                    Map<String, String> values = new HashMap<>();
+                    values.put("_JAR_", "org.apache.kerby.kerberos.kdc.identitybackend.JsonIdentityBackend");
+                    values.put("#_JSON_DIR_", "backend.json.dir = " + dir);
+                    values.put("#_MYSQL_\n", "");
+                    hasServer.updateConfFile("backend.conf", values);
+                } catch (IOException | HasException e) {
+                    throw new RuntimeException("Failed to set Json backend. ", e);
+                }
+                WebServer.LOG.info("Json backend set successfully.");
+
+                return Response.status(200).entity("Json backend set successfully.\n").build();
+            } else if ("mysql".equals(backendType)) {
+                WebServer.LOG.info("Set MySQL backend...");
+                try {
+                    String mysqlConfig = "mysql_driver = " + driver + "\nmysql_url = " + url
+                        + "\nmysql_user = " + user + "\nmysql_password = " + password;
+                    Map<String, String> values = new HashMap<>();
+                    values.put("_JAR_", "org.apache.kerby.has.server.kdc.MySQLIdentityBackend");
+                    values.put("#_JSON_DIR_\n", "");
+                    values.put("#_MYSQL_", mysqlConfig);
+                    hasServer.updateConfFile("backend.conf", values);
+                } catch (IOException | HasException e) {
+                    throw new RuntimeException("Failed to set MySQL backend. ", e);
+                }
+                WebServer.LOG.info("MySQL backend set successfully.");
+
+                return Response.status(200).entity("MySQL backend set successfully.\n").build();
+            } else {
+                return Response.status(400).entity(backendType + " is not supported.\n").build();
+            }
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Config HAS server KDC.
+     * @param port KDC port to set
+     * @param realm KDC realm to set
+     * @param host KDC host to set
+     * @return Response
+     */
+    @PUT
+    @Path("/configkdc")
+    @Consumes({MediaType.TEXT_PLAIN})
+    @Produces({MediaType.TEXT_PLAIN})
+    public Response configKdc(
+        @QueryParam("port") final int port,
+        @QueryParam("realm") final String realm,
+        @QueryParam("host") final String host) {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            WebServer.LOG.info("Config HAS server KDC...");
+            try {
+                BackendConfig backendConfig = KdcUtil.getBackendConfig(hasServer.getConfDir());
+                String backendJar = backendConfig.getString("kdc_identity_backend");
+                if (backendJar.equals("org.apache.kerby.has.server.kdc.MySQLIdentityBackend")) {
+                    hasServer.configMySQLKdc(backendConfig, realm, port, host, hasServer);
+                } else {
+                    Map<String, String> values = new HashMap<>();
+                    values.put("_HOST_", host);
+                    values.put("_PORT_", String.valueOf(port));
+                    values.put("_REALM_", realm);
+                    hasServer.updateConfFile("kdc.conf", values);
+                    String kdc = "\t\tkdc = " + host + ":" + port;
+                    values.put("_KDCS_", kdc);
+                    values.put("_UDP_LIMIT_", "4096");
+                    hasServer.updateConfFile("krb5.conf", values);
+                }
+            } catch (IOException | HasException | KrbException e) {
+                throw new RuntimeException("Failed to config HAS KDC. ", e);
+            }
+            WebServer.LOG.info("HAS server KDC set successfully.");
+            return Response.status(200).entity("HAS server KDC set successfully.\n").build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+}
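
These endpoints back the remote kdcinit commands whose old-package versions are
deleted earlier in this diff (HasConfKdcCmd, HasConfKdcBackendCmd,
HasGetHasconfCmd, HasGetKrb5confCmd). A hedged sketch of driving them through
HasAdminClient, assuming the renamed org.apache.kerby client keeps the same
method signatures as the deleted org.apache.hadoop version shown above;
hasConfig stands for a previously loaded client HasConfig, and the mapping to
the PUT paths above is inferred from the method names:

    HasAdminClient admin = new HasAdminClient(hasConfig, new File("/etc/has"));

    // config_kdc localhost 88 HADOOP.COM   -> PUT /has/v1/conf/configkdc
    admin.configKdc("88", "HADOOP.COM", "localhost");

    // config_kdcBackend json /tmp/has/jsonbackend
    //                                       -> PUT /has/v1/conf/configkdcbackend
    admin.configKdcBackend("json", "/tmp/has/jsonbackend", null, null, null);

    // Fetch the generated client-side configuration as plain strings.
    String hasClientConf = admin.getHasconf();
    String krb5Conf = admin.getKrb5conf();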

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/HadminApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/HadminApi.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/HadminApi.java
new file mode 100644
index 0000000..53361fb
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/HadminApi.java
@@ -0,0 +1,455 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server.web.rest;
+
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.util.HasUtil;
+import org.apache.kerby.has.server.HasServer;
+import org.apache.kerby.has.server.admin.LocalHasAdmin;
+import org.apache.kerby.has.server.web.HostRoleType;
+import org.apache.kerby.has.server.web.WebServer;
+import org.apache.kerby.has.server.web.rest.param.HostParam;
+import org.apache.kerby.has.server.web.rest.param.HostRoleParam;
+import org.apache.kerby.has.server.web.rest.param.PasswordParam;
+import org.apache.kerby.has.server.web.rest.param.PrincipalParam;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+/**
+ * HAS HasAdmin web methods implementation.
+ */
+@Path("/admin")
+public class HadminApi {
+
+    @Context
+    private ServletContext context;
+
+    @Context
+    private HttpServletRequest httpRequest;
+
+    private void compressFile(File file, ZipOutputStream out, String basedir) {
+        if (!file.exists()) {
+            return;
+        }
+        try {
+            BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file));
+            ZipEntry entry = new ZipEntry(basedir + file.getName());
+            out.putNextEntry(entry);
+            int count;
+            byte[] data = new byte[8192];
+            while ((count = bis.read(data, 0, 8192)) != -1) {
+                out.write(data, 0, count);
+            }
+            bis.close();
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * @param host Hadoop node
+     * @param role Hadoop role
+     * @return Response
+     */
+    @GET
+    @Path("/exportkeytabs")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response exportKeytabs(@QueryParam(HostParam.NAME) @DefaultValue(HostParam.DEFAULT)
+                                  final HostParam host,
+                                  @QueryParam(HostRoleParam.NAME) @DefaultValue(HostRoleParam.DEFAULT)
+                                  final HostRoleParam role) {
+        if (httpRequest.isSecure()) {
+            WebServer.LOG.info("Request to export keytabs.");
+            LocalHasAdmin hasAdmin = null;
+            HasServer hasServer = null;
+            try {
+                hasServer = WebServer.getHasServerFromContext(context);
+                hasAdmin = new LocalHasAdmin(hasServer);
+            } catch (KrbException e) {
+                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
+            }
+            if (host.getValue() != null) {
+                if (role.getValue() != null) {
+                    try {
+                        File file = hasAdmin.getKeytabByHostAndRole(host.getValue(), role.getValue());
+                        WebServer.LOG.info("Create keytab file for the " + role.getValue()
+                            + " for " + host.getValue());
+                        return Response.ok(file).header("Content-Disposition",
+                            "attachment; filename=" + role.getValue() + "-"
+                                + host.getValue() + ".keytab").build();
+                    } catch (HasException e) {
+                        WebServer.LOG.error("Failed to export keytab File because : " + e.getMessage());
+                    }
+                } else {
+                    //export keytabs zip file
+                    List<File> keytabs = new ArrayList<>();
+                    for (HostRoleType r : HostRoleType.values()) {
+                        try {
+                            keytabs.add(hasAdmin.getKeytabByHostAndRole(host.getValue(), r.getName()));
+                            WebServer.LOG.info("Create keytab file for the " + r.getName()
+                                + " for " + host.getValue());
+                        } catch (HasException e) {
+                            WebServer.LOG.info("Failed to export keytab File because : " + e.getMessage());
+                        }
+                    }
+                    if (keytabs.size() < 1) {
+                        return Response.serverError().build();
+                    }
+                    File path = new File(hasServer.getWorkDir(), "tmp/zip/"
+                        + System.currentTimeMillis());
+                    path.mkdirs();
+                    File keytabZip = new File(path, "keytab.zip");
+                    if (keytabZip.exists()) {
+                        keytabZip.delete();
+                    }
+                    try {
+                        ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(keytabZip));
+                        for (File keytab : keytabs) {
+                            compressFile(keytab, zos, "");
+                        }
+                        zos.close();
+                        WebServer.LOG.info("Success to create the keytab.zip.");
+                        return Response.ok(keytabZip).header("Content-Disposition",
+                            "attachment; filename=keytab.zip").build();
+                    } catch (Exception e) {
+                        WebServer.LOG.error("Failed to create the keytab.zip,because : " + e.getMessage());
+                    }
+                }
+            }
+            return Response.serverError().build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * export single keytab file
+     *
+     * @param principal principal name to export keytab file
+     * @return Response
+     */
+    @GET
+    @Path("/exportkeytab")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response exportKeytab(@QueryParam("principal") final String principal) {
+        if (httpRequest.isSecure()) {
+            LocalHasAdmin hasAdmin = null;
+            WebServer.LOG.info("Exporting keytab file for " + principal + "...");
+            try {
+                HasServer hasServer = WebServer.getHasServerFromContext(context);
+                hasAdmin = new LocalHasAdmin(hasServer);
+            } catch (KrbException e) {
+                WebServer.LOG.error("Failed to create local hadmin." + e.getMessage());
+            }
+            WebServer.LOG.info("Create keytab file for " + principal + " successfully.");
+            if (principal != null) {
+                try {
+                    File path = new File("/tmp/" + System.currentTimeMillis());
+                    if (path.mkdirs()) {
+                        File keytabFile = new File(path, principal + ".keytab");
+                        hasAdmin.exportKeytab(keytabFile, principal);
+                        return Response.ok(keytabFile).header("Content-Disposition", "attachment; filename="
+                            + keytabFile.getName()).build();
+                    }
+                } catch (HasException e) {
+                    WebServer.LOG.error("Failed to export keytab. " + e.toString());
+                    return Response.serverError().build();
+                }
+            }
+            return Response.serverError().build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    @PUT
+    @Path("/setconf")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response setConf(@QueryParam("isEnable") String isEnable) {
+        if (httpRequest.isSecure()) {
+            WebServer.LOG.info("Request to admin/setconf.");
+            final HasServer hasServer = WebServer.getHasServerFromContext(
+                context);
+            File hasConf = new File(hasServer.getConfDir(), "has-server.conf");
+            if (!hasConf.exists()) {
+                WebServer.LOG.error("has-server.conf is not exists.");
+                return Response.serverError().entity("has-server.conf is not exists.")
+                    .build();
+            }
+            String result = "";
+            if (isEnable.equals("true")) {
+                result = "enable";
+            } else if (isEnable.equals("false")) {
+                result = "disable";
+            } else {
+                WebServer.LOG.error("Value of isEnable is error.");
+                return Response.serverError().entity("Value of isEnable is error.")
+                    .build();
+            }
+            try {
+                HasUtil.setEnableConf(hasConf, isEnable);
+            } catch (Exception e) {
+                WebServer.LOG.error(e.getMessage());
+                return Response.serverError().entity(e.getMessage()).build();
+            }
+            return Response.ok("Set conf to " + result).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    @GET
+    @Path("/getprincipals")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response getprincipals(@QueryParam("exp") String exp) {
+        if (httpRequest.isSecure()) {
+            WebServer.LOG.info("Request to get principals.");
+            JSONObject result = new JSONObject();
+            String msg;
+            LocalHasAdmin hasAdmin = null;
+            try {
+                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
+            } catch (KrbException e) {
+                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
+            }
+            try {
+                JSONArray principals = new JSONArray();
+                List<String> princList = hasAdmin.getPrincipals(exp);
+                for (String princ : princList) {
+                    principals.put(princ);
+                }
+                WebServer.LOG.info("Success to get principals with JSON.");
+                result.put("result", "success");
+                result.put("msg", principals.toString());
+                return Response.ok(result.toString()).build();
+            } catch (Exception e) {
+                WebServer.LOG.error("Failed to get principals,because : " + e.getMessage());
+                msg = "Failed to get principals,because : " + e.getMessage();
+            }
+            try {
+                result.put("result", "error");
+                result.put("msg", msg);
+            } catch (JSONException e) {
+                WebServer.LOG.error(e.getMessage());
+            }
+            return Response.ok(result.toString()).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Add principal by name and password.
+     *
+     * @param principal principal name.
+     * @param password  principal password
+     * @return Response
+     */
+    @POST
+    @Path("/addprincipal")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response addprincipal(@QueryParam(PrincipalParam.NAME) @DefaultValue(PrincipalParam.DEFAULT)
+                                 final PrincipalParam principal,
+                                 @QueryParam(PasswordParam.NAME) @DefaultValue(PasswordParam.DEFAULT)
+                                 final PasswordParam password) {
+        if (httpRequest.isSecure()) {
+            WebServer.LOG.info("Request to add the principal named " + principal.getValue());
+            LocalHasAdmin hasAdmin = null;
+            try {
+                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
+            } catch (KrbException e) {
+                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
+            }
+            JSONObject result = new JSONObject();
+            String msg = "Add principal successfully.";
+            try {
+                hasAdmin.addPrincipal(principal.getValue(), password.getValue());
+                result.put("result", "success");
+                result.put("msg", msg);
+                return Response.ok(result.toString()).build();
+            } catch (Exception e) {
+                WebServer.LOG.error("Failed to add " + principal + " principal, because: " + e.getMessage());
+                msg = "Failed to add " + principal + " principal, because: " + e.getMessage();
+            }
+            try {
+                result.put("result", "error");
+                result.put("msg", msg);
+            } catch (JSONException e) {
+                WebServer.LOG.error(e.getMessage());
+            }
+            return Response.ok(result.toString()).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    @POST
+    @Path("/renameprincipal")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response renamePrincipal(@QueryParam("oldprincipal") String oldPrincipal,
+                                    @QueryParam("newprincipal") String newPrincipal) {
+        if (httpRequest.isSecure()) {
+            WebServer.LOG.info("Request to rename " + oldPrincipal + " to " + newPrincipal);
+            JSONObject result = new JSONObject();
+            String msg = "Rename principal successfully.";
+            if (oldPrincipal != null && newPrincipal != null) {
+                LocalHasAdmin hasAdmin = null;
+                try {
+                    hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
+                } catch (KrbException e) {
+                    WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
+                }
+                try {
+                    hasAdmin.renamePrincipal(oldPrincipal, newPrincipal);
+                    result.put("result", "success");
+                    result.put("msg", msg);
+                    return Response.ok(result.toString()).build();
+                } catch (Exception e) {
+                    WebServer.LOG.error("Failed to rename principal " + oldPrincipal + " to "
+                        + newPrincipal + ",because: " + e.getMessage());
+                    msg = "Failed to rename principal " + oldPrincipal + " to "
+                        + newPrincipal + ",because: " + e.getMessage();
+                }
+            } else {
+                WebServer.LOG.error("Value of old or new principal is null.");
+                msg = "Value of old or new principal is null.";
+            }
+            try {
+                result.put("result", "error");
+                result.put("msg", msg);
+            } catch (JSONException e) {
+                WebServer.LOG.error(e.getMessage());
+            }
+            return Response.ok(result.toString()).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Delete principal by name.
+     *
+     * @param principal principal like "admin" or "admin@HADOOP.COM".
+     * @return Response
+     */
+    @DELETE
+    @Path("/deleteprincipal")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response deleteprincipal(@QueryParam(PrincipalParam.NAME) @DefaultValue(PrincipalParam.DEFAULT)
+                                    final PrincipalParam principal) {
+        if (httpRequest.isSecure()) {
+            WebServer.LOG.info("Request to delete the principal named " + principal.getValue());
+            JSONObject result = new JSONObject();
+            String msg = "Delete principal successfully.";
+            LocalHasAdmin hasAdmin = null;
+            try {
+                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
+            } catch (KrbException e) {
+                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
+            }
+            try {
+                hasAdmin.deletePrincipal(principal.getValue());
+                result.put("result", "success");
+                result.put("msg", msg);
+                return Response.ok(result.toString()).build();
+            } catch (Exception e) {
+                WebServer.LOG.error("Failed to delete the principal named " + principal.getValue()
+                    + ",because : " + e.getMessage());
+                msg = "Failed to delete the principal named " + principal.getValue()
+                    + ",because : " + e.getMessage();
+            }
+            try {
+                result.put("result", "error");
+                result.put("msg", msg);
+            } catch (JSONException e) {
+                WebServer.LOG.error(e.getMessage());
+            }
+            return Response.ok(result.toString()).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    @PUT
+    @Path("/createprincipals")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response createprincipals(@Context HttpServletRequest request) {
+        if (httpRequest.isSecure()) {
+            LocalHasAdmin hasAdmin = null;
+            try {
+                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
+            } catch (KrbException e) {
+                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
+            }
+            JSONObject result = new JSONObject();
+            String msg = "";
+            try {
+                StringBuilder data = new StringBuilder();
+                BufferedReader br = new BufferedReader(new InputStreamReader(request.getInputStream()));
+                String s;
+                while ((s = br.readLine()) != null) {
+                    data.append(s);
+                }
+                WebServer.LOG.info("Request to create principals by JSON : \n" + data.toString());
+                JSONArray hostArray = new JSONObject(data.toString()).optJSONArray("HOSTS");
+                for (int i = 0; i < hostArray.length(); i++) {
+                    JSONObject host = (JSONObject) hostArray.get(i);
+                    String[] roles = host.getString("hostRoles").split(",");
+                    for (String role : roles) {
+                        msg += hasAdmin.addPrincByRole(host.getString("name"), role.toUpperCase());
+                    }
+                }
+                result.put("result", "success");
+                result.put("msg", msg);
+                return Response.ok(result.toString()).build();
+            } catch (Exception e) {
+                WebServer.LOG.error("Failed to create principals,because : " + e.getMessage());
+                msg = "Failed to create principals,because : " + e.getMessage();
+            }
+            try {
+                result.put("result", "error");
+                result.put("msg", msg);
+            } catch (JSONException e) {
+                WebServer.LOG.error(e.getMessage());
+            }
+            return Response.ok(result.toString()).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+}
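
The createprincipals endpoint above expects a JSON body with a top-level "HOSTS" array; each entry carries a host "name" and a comma-separated "hostRoles" string, and principals are added per host role. A minimal sketch of building such a payload with the same Jettison API the server uses (the host name and role values below are illustrative, not taken from this commit):

    import org.codehaus.jettison.json.JSONArray;
    import org.codehaus.jettison.json.JSONObject;

    public class CreatePrincipalsPayloadSketch {
        public static void main(String[] args) throws Exception {
            // One host entry; "name" and "hostRoles" are the keys read by createprincipals().
            JSONObject host = new JSONObject();
            host.put("name", "host1.example.com");   // hypothetical host name
            host.put("hostRoles", "HDFS,YARN");      // split on "," and upper-cased server-side

            JSONArray hosts = new JSONArray();
            hosts.put(host);

            JSONObject body = new JSONObject();
            body.put("HOSTS", hosts);

            // body.toString() is what a client would send as the PUT entity to /createprincipals.
            System.out.println(body.toString());
        }
    }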

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/HasApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/HasApi.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/HasApi.java
new file mode 100644
index 0000000..9e73211
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/HasApi.java
@@ -0,0 +1,336 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server.web.rest;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.util.HasUtil;
+import org.apache.kerby.has.server.HasAuthenException;
+import org.apache.kerby.has.server.HasServer;
+import org.apache.kerby.has.server.HasServerPlugin;
+import org.apache.kerby.has.server.HasServerPluginRegistry;
+import org.apache.kerby.has.server.kdc.HasKdcHandler;
+import org.apache.kerby.has.server.web.HostRoleType;
+import org.apache.kerby.has.server.web.WebServer;
+import org.apache.kerby.has.server.web.rest.param.AuthTokenParam;
+import org.apache.kerby.has.server.web.rest.param.TypeParam;
+import org.apache.hadoop.http.JettyUtils;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.KrbRuntime;
+import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
+import org.apache.kerby.kerberos.kerb.provider.TokenDecoder;
+import org.apache.kerby.kerberos.kerb.server.KdcUtil;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * HAS web methods implementation.
+ */
+@Path("")
+public class HasApi {
+
+    @Context
+    private ServletContext context;
+
+    @Context
+    private HttpServletRequest httpRequest;
+
+    /**
+     * Get krb5.conf file.
+     *
+     * @return Response
+     */
+    @GET
+    @Path("/getkrb5conf")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response getKrb5Conf() {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            try {
+                BackendConfig backendConfig = KdcUtil.getBackendConfig(hasServer.getConfDir());
+                String backendJar = backendConfig.getString("kdc_identity_backend");
+                File conf;
+                if (backendJar.equals("org.apache.kerby.has.server.kdc.MySQLIdentityBackend")) {
+                    conf = hasServer.generateKrb5Conf();
+                } else {
+                    File confDir = hasServer.getConfDir();
+                    conf = new File(confDir, "krb5.conf");
+                }
+                return Response.ok(conf).header("Content-Disposition", "attachment; filename=krb5.conf").build();
+            } catch (KrbException | HasException e) {
+                throw new RuntimeException("Failed to get Krb5.conf. ", e);
+            }
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Get has-client.conf file.
+     *
+     * @return Response
+     */
+    @GET
+    @Path("/gethasconf")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response getHasConf() {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            try {
+                BackendConfig backendConfig = KdcUtil.getBackendConfig(hasServer.getConfDir());
+                String backendJar = backendConfig.getString("kdc_identity_backend");
+                File conf;
+                if (backendJar.equals("org.apache.kerby.has.server.kdc.MySQLIdentityBackend")) {
+                    conf = hasServer.generateHasConf();
+                } else {
+                    File confDir = hasServer.getConfDir();
+                    conf = new File(confDir, "has-server.conf");
+                }
+                return Response.ok(conf).header("Content-Disposition", "attachment; filename=has-client.conf").build();
+            } catch (IOException | KrbException | HasException e) {
+                throw new RuntimeException("Failed to get has-client.conf. ", e);
+            }
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Get CA file.
+     *
+     * @return Response
+     */
+    @GET
+    @Path("/getcert")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response getCert() {
+        final HasServer hasServer = WebServer.getHasServerFromContext(context);
+        String errMessage = null;
+        File cert = null;
+        try {
+            HasConfig hasConfig = HasUtil.getHasConfig(
+                new File(hasServer.getConfDir(), "has-server.conf"));
+            if (hasConfig != null) {
+                String certPath = hasConfig.getSslClientCert();
+                cert = new File(certPath);
+                if (!cert.exists()) {
+                    errMessage = "Cert file not found in HAS server.";
+                    WebServer.LOG.error("Cert file not found in HAS server.");
+                }
+            } else {
+                errMessage = "has-server.conf not found.";
+                WebServer.LOG.error("has-server.conf not found.");
+            }
+        } catch (HasException e) {
+            errMessage = "Failed to get cert file" + e.getMessage();
+            WebServer.LOG.error("Failed to get cert file" + e.getMessage());
+        }
+        if (errMessage == null) {
+            return Response.ok(cert).header("Content-Disposition",
+                "attachment;filename=" + cert.getName()).build();
+        } else {
+            return Response.status(Response.Status.NOT_FOUND).entity(errMessage).build();
+        }
+    }
+
+    @GET
+    @Path("/hostroles")
+    @Produces(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8)
+    public Response getRoles() {
+        if (httpRequest.isSecure()) {
+            JSONArray result = new JSONArray();
+            try {
+                for (HostRoleType role : HostRoleType.values()) {
+                    JSONObject jso = new JSONObject();
+                    jso.put("HostRole", role.getName());
+                    JSONArray jsa = new JSONArray();
+                    String[] princs = role.getPrincs();
+                    for (String princ : princs) {
+                        jsa.put(princ);
+                    }
+                    jso.put("PrincipalNames", jsa);
+                    result.put(jso);
+                }
+                return Response.ok(result.toString() + "\n").type(MediaType.APPLICATION_JSON).build();
+            } catch (Exception e) {
+                WebServer.LOG.error("Failed to get host roles." + e.getMessage());
+            }
+            return Response.serverError().build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    @GET
+    @Path("/kdcinit")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response kdcInit() {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            try {
+                File adminKeytab = hasServer.initKdcServer();
+                return Response.ok(adminKeytab).header("Content-Disposition",
+                    "attachment; filename=" + adminKeytab.getName()).build();
+            } catch (KrbException e) {
+                System.err.println("[ERROR] " + e.getMessage());
+            }
+            return Response.serverError().build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    @GET
+    @Path("/kdcstart")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response kdcStart() {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            JSONObject result = new JSONObject();
+            String msg = "Succeed in starting KDC server.";
+
+            try {
+                hasServer.startKdcServer();
+            } catch (HasException e) {
+                WebServer.LOG.error("Fail to start kdc server. " + e.getMessage());
+                msg = e.getMessage();
+            }
+            try {
+                result.put("result", "success");
+                result.put("msg", msg);
+                return Response.ok(result.toString()).build();
+            } catch (Exception e) {
+                WebServer.LOG.error(e.getMessage());
+                msg = e.getMessage();
+            }
+            try {
+                result.put("result", "error");
+                result.put("msg", msg);
+            } catch (JSONException e) {
+                WebServer.LOG.error(e.getMessage());
+            }
+            return Response.ok(result.toString()).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Handle HTTP PUT request.
+     */
+    @PUT
+    @Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8,
+        MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8})
+    public Response asRequest(
+        @QueryParam(TypeParam.NAME) @DefaultValue(TypeParam.DEFAULT)
+        final TypeParam type,
+        @QueryParam(AuthTokenParam.NAME) @DefaultValue(AuthTokenParam.DEFAULT)
+        final AuthTokenParam authToken
+    ) {
+        return asRequest(type.getValue(), authToken.getValue());
+    }
+
+    private Response asRequest(String type, String tokenStr) {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            String errMessage = null;
+            String js = null;
+            ObjectMapper mapper = new ObjectMapper();
+            final Map<String, Object> m = new TreeMap<String, Object>();
+
+            if (hasServer.getKdcServer() == null) {
+                errMessage = "Please start the has KDC server.";
+            } else if (tokenStr != null && !tokenStr.isEmpty()) {
+                HasKdcHandler kdcHandler = new HasKdcHandler(hasServer);
+
+                TokenDecoder tokenDecoder = KrbRuntime.getTokenProvider("JWT").createTokenDecoder();
+
+                AuthToken authToken = null;
+                try {
+                    authToken = tokenDecoder.decodeFromString(tokenStr);
+                } catch (IOException e) {
+                    errMessage = "Failed to decode the token string." + e.getMessage();
+                    WebServer.LOG.error(errMessage);
+                }
+                HasServerPlugin tokenPlugin = null;
+                try {
+                    tokenPlugin = HasServerPluginRegistry.createPlugin(type);
+                } catch (HasException e) {
+                    errMessage = "Fail to get the plugin: " + type + ". " + e.getMessage();
+                    WebServer.LOG.error(errMessage);
+                }
+                AuthToken verifiedAuthToken;
+                try {
+                    verifiedAuthToken = tokenPlugin.authenticate(authToken);
+                } catch (HasAuthenException e) {
+                    errMessage = "Failed to verify auth token: " + e.getMessage();
+                    WebServer.LOG.error(errMessage);
+                    verifiedAuthToken = null;
+                }
+
+                if (verifiedAuthToken != null) {
+                    KrbMessage asRep = kdcHandler.getResponse(verifiedAuthToken,
+                        (String) verifiedAuthToken.getAttributes().get("passPhrase"));
+
+                    Base64 base64 = new Base64(0);
+                    try {
+                        m.put("type", tokenPlugin.getLoginType());
+                        m.put("success", "true");
+                        m.put("krbMessage", base64.encodeToString(asRep.encode()));
+                    } catch (IOException e) {
+                        errMessage = "Failed to encode KrbMessage." + e.getMessage();
+                        WebServer.LOG.error(errMessage);
+                    }
+
+                }
+            } else {
+                errMessage = "The token string should not be empty.";
+                WebServer.LOG.error(errMessage);
+            }
+
+            if (errMessage != null) {
+                m.put("success", "false");
+                m.put("krbMessage", errMessage);
+            }
+            try {
+                js = mapper.writeValueAsString(m);
+            } catch (JsonProcessingException e) {
+                WebServer.LOG.error("Failed write values to string." + e.getMessage());
+            }
+            return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+}
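
The asRequest handler above replies with a JSON map containing "success", "krbMessage" and, on success, the plugin "type"; "krbMessage" carries the Base64-encoded AS-REP on success and a plain error message on failure. A minimal client-side sketch for reading such a reply, assuming the response body is already available as a string and reusing the same Jackson and commons-codec classes the server imports (the sample reply here is hypothetical):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.commons.codec.binary.Base64;

    import java.util.Map;

    public class AsRequestReplySketch {
        @SuppressWarnings("unchecked")
        public static void main(String[] args) throws Exception {
            // Hypothetical reply; a real success reply carries a Base64 AS-REP in "krbMessage".
            String json = "{\"success\":\"false\","
                + "\"krbMessage\":\"The token string should not be empty.\"}";

            Map<String, Object> reply = new ObjectMapper().readValue(json, Map.class);
            if ("true".equals(reply.get("success"))) {
                // Decode the AS-REP bytes before handing them to the Kerberos client code.
                byte[] asRep = new Base64(0).decode((String) reply.get("krbMessage"));
                System.out.println("AS-REP length: " + asRep.length);
            } else {
                System.err.println("AS request failed: " + reply.get("krbMessage"));
            }
        }
    }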

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/AuthTokenParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/AuthTokenParam.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/AuthTokenParam.java
new file mode 100644
index 0000000..a0273e1
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/AuthTokenParam.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server.web.rest.param;
+
+public class AuthTokenParam extends StringParam {
+  /**
+   * Parameter name.
+   */
+  public static final String NAME = "authToken";
+  /**
+   * Default parameter value.
+   */
+  public static final String DEFAULT = "";
+
+  private static final StringParam.Domain DOMAIN = new StringParam.Domain(NAME, null);
+
+  /**
+   * Constructor.
+   *
+   * @param str a string representation of the parameter value.
+   */
+  public AuthTokenParam(final String str) {
+    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/EnumParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/EnumParam.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/EnumParam.java
new file mode 100644
index 0000000..661c7e2
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/EnumParam.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server.web.rest.param;
+
+import org.apache.hadoop.util.StringUtils;
+
+import java.util.Arrays;
+
+abstract class EnumParam<E extends Enum<E>>
+    extends Param<E, EnumParam.Domain<E>> {
+  EnumParam(final Domain<E> domain, final E value) {
+    super(domain, value);
+  }
+
+  /**
+   * The domain of the parameter.
+   */
+  static final class Domain<E extends Enum<E>> extends Param.Domain<E> {
+    private final Class<E> enumClass;
+
+    Domain(String name, Class<E> enumClass) {
+      super(name);
+      this.enumClass = enumClass;
+    }
+
+    @Override
+    public String getDomain() {
+      return Arrays.asList(enumClass.getEnumConstants()).toString();
+    }
+
+    @Override
+    E parse(String str) {
+      return Enum.valueOf(enumClass, StringUtils.toUpperCase(str));
+    }
+  }
+}
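
EnumParam is abstract and has no concrete subclass in this commit. A hypothetical subclass binding a query parameter to HostRoleType (the enum iterated by the /hostroles handler above) could look like the sketch below; it has to live in the same package because EnumParam and its Domain are package-private, and both the class name and the parameter name are invented for illustration:

    package org.apache.kerby.has.server.web.rest.param;

    import org.apache.kerby.has.server.web.HostRoleType;

    public class HostRoleTypeParam extends EnumParam<HostRoleType> {
        /** Parameter name (illustrative). */
        public static final String NAME = "hostroletype";

        private static final Domain<HostRoleType> DOMAIN =
            new Domain<HostRoleType>(NAME, HostRoleType.class);

        public HostRoleTypeParam(final String str) {
            // Domain.parse() upper-cases the string and maps it onto the enum constants.
            super(DOMAIN, DOMAIN.parse(NAME, str));
        }

        @Override
        public String getName() {
            return NAME;
        }

        @Override
        public String getValueString() {
            return getValue() == null ? null : getValue().toString();
        }
    }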

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/HostParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/HostParam.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/HostParam.java
new file mode 100644
index 0000000..acf0306
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/HostParam.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server.web.rest.param;
+
+public class HostParam extends StringParam {
+  /**
+   * Parameter name.
+   */
+  public static final String NAME = "host";
+  /**
+   * Default parameter value.
+   */
+  public static final String DEFAULT = "";
+
+  private static final Domain DOMAIN = new Domain(NAME, null);
+
+  /**
+   * Constructor.
+   *
+   * @param str a string representation of the parameter value.
+   */
+  public HostParam(final String str) {
+    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/HostRoleParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/HostRoleParam.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/HostRoleParam.java
new file mode 100644
index 0000000..72706ff
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/HostRoleParam.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server.web.rest.param;
+
+public class HostRoleParam extends StringParam {
+  /**
+   * Parameter name.
+   */
+  public static final String NAME = "role";
+  /**
+   * Default parameter value.
+   */
+  public static final String DEFAULT = "";
+
+  private static final Domain DOMAIN = new Domain(NAME, null);
+
+  /**
+   * Constructor.
+   *
+   * @param str a string representation of the parameter value.
+   */
+  public HostRoleParam(final String str) {
+    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/Param.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/Param.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/Param.java
new file mode 100644
index 0000000..4314ae2
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/Param.java
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server.web.rest.param;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
+import java.util.Arrays;
+import java.util.Comparator;
+
+/**
+ * Base class of parameters.
+ */
+public abstract class Param<T, D extends Param.Domain<T>> {
+  static final String NULL = "null";
+
+  static final Comparator<Param<?, ?>> NAME_CMP = new Comparator<Param<?, ?>>() {
+    @Override
+    public int compare(Param<?, ?> left, Param<?, ?> right) {
+      return left.getName().compareTo(right.getName());
+    }
+  };
+
+  /** Convert the parameters to a sorted String.
+   *
+   * @param separator URI parameter separator character
+   * @param parameters parameters to encode into a string
+   * @return the encoded URI string
+   */
+  public static String toSortedString(final String separator,
+                                      final Param<?, ?>... parameters) {
+    Arrays.sort(parameters, NAME_CMP);
+    final StringBuilder b = new StringBuilder();
+    try {
+      for (Param<?, ?> p : parameters) {
+        if (p.getValue() != null) {
+          b.append(separator)
+              .append(URLEncoder.encode(p.getName(), "UTF-8"))
+              .append("=")
+              .append(URLEncoder.encode(p.getValueString(), "UTF-8"));
+        }
+      }
+    } catch (UnsupportedEncodingException e) {
+      // Sane systems know about UTF-8, so this should never happen.
+      throw new RuntimeException(e);
+    }
+    return b.toString();
+  }
+
+  /** The domain of the parameter. */
+  final D domain;
+  /** The actual parameter value. */
+  final T value;
+
+  Param(final D domain, final T value) {
+    this.domain = domain;
+    this.value = value;
+  }
+
+  /** @return the parameter value. */
+  public final T getValue() {
+    return value;
+  }
+
+  /** @return the parameter value as a string */
+  public abstract String getValueString();
+
+  /** @return the parameter name. */
+  public abstract String getName();
+
+  @Override
+  public String toString() {
+    return getName() + "=" + value;
+  }
+
+  /** Base class of parameter domains. */
+  abstract static class Domain<T> {
+    /** Parameter name. */
+    final String paramName;
+
+    Domain(final String paramName) {
+      this.paramName = paramName;
+    }
+
+    /** @return the parameter name. */
+    public final String getParamName() {
+      return paramName;
+    }
+
+    /** @return a string description of the domain of the parameter. */
+    public abstract String getDomain();
+
+    /** @return the parameter value represented by the string. */
+    abstract T parse(String str);
+
+    /** Parse the given string.
+     * @return the parameter value represented by the string.
+     */
+    public final T parse(final String varName, final String str) {
+      try {
+        return str != null && str.trim().length() > 0 ? parse(str) : null;
+      } catch (Exception e) {
+        throw new IllegalArgumentException("Failed to parse \"" + str
+            + "\" for the parameter " + varName
+            + ".  The value must be in the domain " + getDomain(), e);
+      }
+    }
+  }
+}
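
Param.toSortedString() is what client code would use to assemble query strings for the endpoints above: it sorts the parameters by name, URL-encodes names and values, and prefixes each pair with the given separator. A small sketch using two of the parameter classes from this commit (the principal and password values are placeholders):

    import org.apache.kerby.has.server.web.rest.param.Param;
    import org.apache.kerby.has.server.web.rest.param.PasswordParam;
    import org.apache.kerby.has.server.web.rest.param.PrincipalParam;

    public class ParamQueryStringSketch {
        public static void main(String[] args) {
            // Parameters come out sorted by name; "@" is percent-encoded.
            String query = Param.toSortedString("&",
                new PrincipalParam("admin@HADOOP.COM"),
                new PasswordParam("secret"));

            // Prints: &password=secret&principal=admin%40HADOOP.COM
            System.out.println(query);
        }
    }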

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/PasswordParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/PasswordParam.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/PasswordParam.java
new file mode 100644
index 0000000..52c19ea
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/PasswordParam.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server.web.rest.param;
+
+public class PasswordParam extends StringParam {
+  /**
+   * Parameter name.
+   */
+  public static final String NAME = "password";
+  /**
+   * Default parameter value.
+   */
+  public static final String DEFAULT = "";
+
+  private static final Domain DOMAIN = new Domain(NAME, null);
+
+  /**
+   * Constructor.
+   *
+   * @param str a string representation of the parameter value.
+   */
+  public PasswordParam(final String str) {
+    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/PrincipalParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/PrincipalParam.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/PrincipalParam.java
new file mode 100644
index 0000000..aadb78a
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/PrincipalParam.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server.web.rest.param;
+
+public class PrincipalParam extends StringParam {
+  /**
+   * Parameter name.
+   */
+  public static final String NAME = "principal";
+  /**
+   * Default parameter value.
+   */
+  public static final String DEFAULT = "";
+
+  private static final Domain DOMAIN = new Domain(NAME, null);
+
+  /**
+   * Constructor.
+   *
+   * @param str a string representation of the parameter value.
+   */
+  public PrincipalParam(final String str) {
+    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/StringParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/StringParam.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/StringParam.java
new file mode 100644
index 0000000..8924734
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/StringParam.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server.web.rest.param;
+
+import java.util.regex.Pattern;
+
+/**
+ * String parameter.
+ */
+abstract class StringParam extends Param<String, StringParam.Domain> {
+  StringParam(final Domain domain, String str) {
+    super(domain, domain.parse(str));
+  }
+
+  /**
+   * @return the parameter value as a string
+   */
+  @Override
+  public String getValueString() {
+    return value;
+  }
+
+  /**
+   * The domain of the parameter.
+   */
+  static final class Domain extends Param.Domain<String> {
+    /**
+     * The pattern defining the domain; null means any string is accepted.
+     */
+    private final Pattern pattern;
+
+    Domain(final String paramName, final Pattern pattern) {
+      super(paramName);
+      this.pattern = pattern;
+    }
+
+    @Override
+    public String getDomain() {
+      return pattern == null ? "<String>" : pattern.pattern();
+    }
+
+    @Override
+    String parse(String str) {
+      if (str != null && pattern != null) {
+        if (!pattern.matcher(str).matches()) {
+          throw new IllegalArgumentException("Invalid value: \"" + str
+              + "\" does not belong to the domain " + getDomain());
+        }
+      }
+      return str;
+    }
+  }
+}
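
All of the parameter classes in this commit pass a null pattern, so their domain is any string; a pattern-restricted domain behaves as sketched below. The sketch must sit in the same package because StringParam.Domain is package-private, and the "realm" parameter name and pattern are invented for illustration:

    package org.apache.kerby.has.server.web.rest.param;

    import java.util.regex.Pattern;

    public class PatternDomainSketch {
        public static void main(String[] args) {
            // Only upper-case, realm-like strings are accepted by this domain.
            StringParam.Domain realm =
                new StringParam.Domain("realm", Pattern.compile("[A-Z][A-Z.]*"));

            System.out.println(realm.parse("realm", "HADOOP.COM"));  // prints HADOOP.COM

            try {
                realm.parse("realm", "hadoop.com");                  // rejected by the pattern
            } catch (IllegalArgumentException e) {
                System.err.println(e.getMessage());
            }
        }
    }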

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/TypeParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/TypeParam.java b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/TypeParam.java
new file mode 100644
index 0000000..41b830e
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/kerby/has/server/web/rest/param/TypeParam.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.server.web.rest.param;
+
+public class TypeParam extends StringParam {
+
+    /**
+     * Parameter name.
+     */
+    public static final String NAME = "type";
+    /**
+     * Default parameter value.
+     */
+    public static final String DEFAULT = "";
+
+    private static final Domain DOMAIN = new Domain(NAME, null);
+
+    /**
+     * Constructor.
+     *
+     * @param str a string representation of the parameter value.
+     */
+    public TypeParam(final String str) {
+        super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
+    }
+
+    @Override
+    public String getName() {
+        return NAME;
+    }
+}
+
+


[03/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasInitKdcCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasInitKdcCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasInitKdcCmd.java
deleted file mode 100644
index 24cb63c..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasInitKdcCmd.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.kdcinit.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.kerby.kerberos.kerb.KrbException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * Remote init kdc cmd
- */
-public class HasInitKdcCmd extends KdcInitCmd {
-
-    public static final String USAGE = "Usage: init_kdc [-p] [path]\n"
-        + "\tExample:\n"
-        + "\t\tinit_kdc\n";
-
-    public HasInitKdcCmd(HasAdminClient hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws KrbException {
-        if (items.length >= 2) {
-            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
-                System.out.println(USAGE);
-                return;
-            }
-        }
-        File path = getHadmin().getConfDir();
-        if (items.length >= 3 && items[1].startsWith("-p")) {
-            path = new File(items[2]);
-            if (!path.exists()) {
-                if (!path.mkdirs()) {
-                    System.err.println("Cannot create file : " + items[2]);
-                    return;
-                }
-            }
-        }
-        File hadminKeytab = new File(path, "admin.keytab");
-
-        HasAdminClient hasAdminClient = getHadmin();
-        InputStream content = hasAdminClient.initKdc();
-
-        if (content == null) {
-            System.err.println("Failed to init kdc.");
-            return;
-        }
-
-        FileOutputStream fos = null;
-        try {
-            fos = new FileOutputStream(hadminKeytab);
-        } catch (FileNotFoundException e) {
-            System.err.println("the admin keytab file not found. " + e.getMessage());
-        }
-        byte[] buffer = new byte[4 * 1024];
-        int read;
-        try {
-            while ((read = content.read(buffer)) > 0) {
-                fos.write(buffer, 0, read);
-            }
-            fos.close();
-            content.close();
-        } catch (IOException e) {
-            System.err.println("Errors occurred when getting the admin.keytab. " + e.getMessage());
-        }
-
-        System.out.println("admin.keytab has saved in : " + hadminKeytab.getAbsolutePath()
-            + ",\nplease safely save it to use hadmin.");
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasSetPluginCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasSetPluginCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasSetPluginCmd.java
deleted file mode 100644
index 457cf50..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasSetPluginCmd.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.kdcinit.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.kerby.kerberos.kerb.KrbException;
-
-/**
- * Remote set plugin cmd
- */
-public class HasSetPluginCmd extends KdcInitCmd {
-
-    public static final String USAGE = "Usage: set_plugin <plugin>\n"
-        + "\tExample:\n"
-        + "\t\tset_plugin RAM\n";
-
-    public HasSetPluginCmd(HasAdminClient hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws KrbException {
-        if (items.length >= 2) {
-            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
-                System.out.println(USAGE);
-                return;
-            }
-        } else {
-            System.err.println(USAGE);
-            return;
-        }
-
-        HasAdminClient hasAdminClient = getHadmin();
-        hasAdminClient.setPlugin(items[1]);
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasStartKdcCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasStartKdcCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasStartKdcCmd.java
deleted file mode 100644
index 6511e0a..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasStartKdcCmd.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.kdcinit.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.kerby.kerberos.kerb.KrbException;
-
-/**
- * Remote start kdc cmd
- */
-public class HasStartKdcCmd extends KdcInitCmd {
-
-    public static final String USAGE = "Usage: start_kdc\n"
-        + "\tExample:\n"
-        + "\t\tstart\n";
-
-    public HasStartKdcCmd(HasAdminClient hadmin) {
-        super(hadmin);
-    }
-
-    @Override
-    public void execute(String[] items) throws KrbException {
-        if (items.length >= 2) {
-            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
-                System.out.println(USAGE);
-                return;
-            }
-        }
-        HasAdminClient hasAdminClient = getHadmin();
-        hasAdminClient.startKdc();
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/KdcInitCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/KdcInitCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/KdcInitCmd.java
deleted file mode 100644
index a75f702..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/KdcInitCmd.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.kdcinit.cmd;
-
-import org.apache.hadoop.has.client.HasAdminClient;
-import org.apache.kerby.kerberos.kerb.KrbException;
-
-public abstract class KdcInitCmd {
-
-    private HasAdminClient hadmin;
-
-    public KdcInitCmd(HasAdminClient hadmin) {
-        this.hadmin = hadmin;
-    }
-
-    protected HasAdminClient getHadmin() {
-        return hadmin;
-    }
-
-    /**
-     * Execute the kdc init cmd.
-     * @param input Input cmd to execute
-     */
-    public abstract void execute(String[] input) throws KrbException;
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitOption.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitOption.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitOption.java
deleted file mode 100644
index 0e29085..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitOption.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License. 
- *
- */
-package org.apache.hadoop.has.tool.client.kinit;
-
-import org.apache.kerby.KOption;
-import org.apache.kerby.KOptionInfo;
-import org.apache.kerby.KOptionType;
-import org.apache.kerby.kerberos.kerb.client.KrbOptionGroup;
-
-public enum KinitOption implements KOption {
-    NONE(null),
-
-    CLIENT_PRINCIPAL(new KOptionInfo("client-principal", "Client principal",
-        KrbOptionGroup.KRB, KOptionType.STR)),
-    LIFE_TIME(new KOptionInfo("-l", "lifetime",
-        KrbOptionGroup.KRB, KOptionType.DURATION)),
-    START_TIME(new KOptionInfo("-s", "start time",
-        KrbOptionGroup.KRB, KOptionType.DURATION)),
-    RENEWABLE_LIFE(new KOptionInfo("-r", "renewable lifetime",
-        KrbOptionGroup.KRB, KOptionType.DURATION)),
-    FORWARDABLE(new KOptionInfo("-f", "forwardable",
-        KrbOptionGroup.KDC_FLAGS)),
-    NOT_FORWARDABLE(new KOptionInfo("-F", "not forwardable",
-        KrbOptionGroup.KDC_FLAGS)),
-    PROXIABLE(new KOptionInfo("-p", "proxiable",
-        KrbOptionGroup.KDC_FLAGS)),
-    NOT_PROXIABLE(new KOptionInfo("-P", "not proxiable",
-        KrbOptionGroup.KDC_FLAGS)),
-    RENEW(new KOptionInfo("-R", "renew",
-        KrbOptionGroup.KDC_FLAGS)),
-    USE_PASSWD(new KOptionInfo("using-password", "using password",
-        KrbOptionGroup.KRB)),
-    USER_PASSWD(new KOptionInfo("user-passwd", "User plain password",
-        KrbOptionGroup.KRB)),
-    USE_KEYTAB(new KOptionInfo("-k", "use keytab",
-        KrbOptionGroup.KRB)),
-    USE_DFT_KEYTAB(new KOptionInfo("-i", "use default client keytab (with -k)",
-        KrbOptionGroup.KRB)),
-    KEYTAB_FILE(new KOptionInfo("-t", "filename of keytab to use",
-        KrbOptionGroup.KRB, KOptionType.FILE)),
-    KRB5_CACHE(new KOptionInfo("-c", "Kerberos 5 cache name",
-        KrbOptionGroup.KRB, KOptionType.STR)),
-    SERVICE(new KOptionInfo("-S", "service",
-        KrbOptionGroup.KRB, KOptionType.STR)),
-
-    CONF_DIR(new KOptionInfo("-conf", "conf dir", KrbOptionGroup.KRB, KOptionType.DIR));
-
-    private final KOptionInfo optionInfo;
-
-    KinitOption(KOptionInfo optionInfo) {
-        this.optionInfo = optionInfo;
-    }
-
-    @Override
-    public KOptionInfo getOptionInfo() {
-        return optionInfo;
-    }
-
-    public static KinitOption fromName(String name) {
-        if (name != null) {
-            for (KinitOption ko : values()) {
-                if (ko.optionInfo != null
-                        && ko.optionInfo.getName().equals(name)) {
-                    return ko;
-                }
-            }
-        }
-        return NONE;
-    }
-}
-

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitTool.java
deleted file mode 100644
index a061266..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitTool.java
+++ /dev/null
@@ -1,384 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License. 
- *
- */
-package org.apache.hadoop.has.tool.client.kinit;
-
-import org.apache.kerby.KOption;
-import org.apache.kerby.KOptionGroup;
-import org.apache.kerby.KOptionInfo;
-import org.apache.kerby.KOptionType;
-import org.apache.kerby.KOptions;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.client.KrbClient;
-import org.apache.kerby.kerberos.kerb.client.KrbKdcOption;
-import org.apache.kerby.kerberos.kerb.client.KrbOption;
-import org.apache.kerby.kerberos.kerb.client.KrbOptionGroup;
-import org.apache.kerby.kerberos.kerb.client.PkinitOption;
-import org.apache.kerby.kerberos.kerb.client.TokenOption;
-import org.apache.kerby.kerberos.kerb.type.ticket.SgtTicket;
-import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;
-import org.apache.kerby.util.OSUtil;
-import org.apache.kerby.util.SysUtil;
-
-import java.io.Console;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Arrays;
-import java.util.Scanner;
-
-/**
- * kinit like tool
- *
- * Ref. MIT kinit command tool usage.
- */
-public class KinitTool {
-
-    private static final String USAGE = (OSUtil.isWindows()
-            ? "Usage: bin\\kinit.cmd" : "Usage: sh bin/kinit.sh")
-            + " <-conf conf_dir> [-V] [-l lifetime] [-s start_time]\n"
-            + "\t\t[-r renewable_life] [-f | -F] [-p | -P] -n [-a | -A] [-C] [-E]\n"
-            + "\t\t[-v] [-R] [-k [-i|-t keytab_file]] [-c cachename]\n"
-            + "\t\t[-S service_name] [-T ticket_armor_cache]\n"
-            + "\t\t[-X <attribute>[=<value>]] <principal>\n\n"
-            + "\tDESCRIPTION:\n"
-            + "\t\tkinit obtains and caches an initial ticket-granting ticket for principal.\n\n"
-            + "\tOPTIONS:\n"
-            + "\t\t-V verbose\n"
-            + "\t\t-l lifetime\n"
-            + "\t\t-s start time\n"
-            + "\t\t-r renewable lifetime\n"
-            + "\t\t-f forwardable\n"
-            + "\t\t-F not forwardable\n"
-            + "\t\t-p proxiable\n"
-            + "\t\t-P not proxiable\n"
-            + "\t\t-n anonymous\n"
-            + "\t\t-a include addresses\n"
-            + "\t\t-A do not include addresses\n"
-            + "\t\t-v validate\n"
-            + "\t\t-R renew\n"
-            + "\t\t-C canonicalize\n"
-            + "\t\t-E client is enterprise principal name\n"
-            + "\t\t-k use keytab\n"
-            + "\t\t-i use default client keytab (with -k)\n"
-            + "\t\t-t filename of keytab to use\n"
-            + "\t\t-c Kerberos 5 cache name\n"
-            + "\t\t-S service\n"
-            + "\t\t-T armor credential cache\n"
-            + "\t\t-X <attribute>[=<value>]\n"
-            + "\n";
-
-    private static void printUsage(String error) {
-        System.err.println(error + "\n");
-        System.err.println(USAGE);
-        System.exit(-1);
-    }
-
-    private static final String KVNO_USAGE = (OSUtil.isWindows()
-        ? "Usage: bin\\kinit.cmd" : "Usage: sh bin/kinit.sh")
-        + " <-conf conf_dir> <-c cachename> <-S service_name>\n\n"
-        + "\tDESCRIPTION:\n"
-        + "\t\tkinit obtains a service ticket for the specified principal and prints out the key version number.\n"
-        + "\n";
-
-    private static void printKvnoUsage(String error) {
-        System.err.println(error + "\n");
-        System.err.println(KVNO_USAGE);
-        System.exit(-1);
-    }
-
-    /**
-     * Get password for the input principal from console
-     */
-    private static String getPassword(String principal) {
-        Console console = System.console();
-        if (console == null) {
-            System.out.println("Couldn't get Console instance, "
-                    + "maybe you're running this from within an IDE. "
-                    + "Use scanner to read password.");
-            System.out.println("Password for " + principal + ":");
-            try (Scanner scanner = new Scanner(System.in, "UTF-8")) {
-                return scanner.nextLine().trim();
-            }
-        }
-        console.printf("Password for " + principal + ":");
-        char[] passwordChars = console.readPassword();
-        String password = new String(passwordChars).trim();
-        Arrays.fill(passwordChars, ' ');
-
-        return password;
-    }
-
-    private static void requestTicket(String principal, KOptions ktOptions) {
-        ktOptions.add(KinitOption.CLIENT_PRINCIPAL, principal);
-
-        File confDir = null;
-        if (ktOptions.contains(KinitOption.CONF_DIR)) {
-            confDir = ktOptions.getDirOption(KinitOption.CONF_DIR);
-        }
-
-        KrbClient krbClient = null;
-        try {
-            krbClient = getClient(confDir);
-        } catch (KrbException e) {
-            System.err.println("Create krbClient failed: " + e.getMessage());
-            System.exit(1);
-        }
-
-        if (ktOptions.contains(KinitOption.RENEW)) {
-            if (ktOptions.contains(KinitOption.KRB5_CACHE)) {
-                String ccName = ktOptions.getStringOption(KinitOption.KRB5_CACHE);
-                File ccFile = new File(ccName);
-
-                SgtTicket sgtTicket = null;
-                try {
-                    sgtTicket = krbClient.requestSgt(ccFile, null);
-                } catch (KrbException e) {
-                    System.err.println("kinit: " + e.getKrbErrorCode().getMessage());
-                }
-
-                try {
-                    krbClient.renewTicket(sgtTicket, ccFile);
-                } catch (KrbException e) {
-                    System.err.println("kinit: " + e.getKrbErrorCode().getMessage());
-                }
-
-                System.out.println("Successfully renewed.");
-            }
-            return;
-        }
-
-        if (ktOptions.contains(KinitOption.SERVICE) && ktOptions.contains(KinitOption.KRB5_CACHE)) {
-            String ccName = ktOptions.getStringOption(KinitOption.KRB5_CACHE);
-            File ccFile = new File(ccName);
-            if (ccFile.exists()) {
-                System.out.println("Use credential cache to request a service ticket.");
-                String servicePrincipal = ktOptions.getStringOption(KinitOption.SERVICE);
-                SgtTicket sgtTicket = null;
-                try {
-                    sgtTicket = krbClient.requestSgt(ccFile, servicePrincipal);
-                } catch (KrbException e) {
-                    System.err.println("Kinit: get service ticket failed: " + e.getMessage());
-                    System.exit(1);
-                }
-
-                try {
-                    krbClient.storeTicket(sgtTicket, ccFile);
-                } catch (KrbException e) {
-                    System.err.println("Kinit: store ticket failed: " + e.getMessage());
-                    System.exit(1);
-                }
-
-            System.out.println(sgtTicket.getEncKdcRepPart().getSname().getName() + ": kvno = "
-                    + sgtTicket.getTicket().getEncryptedEncPart().getKvno());
-                return;
-            }
-        }
-
-        if (!ktOptions.contains(KinitOption.USE_KEYTAB)) {
-            // If not requesting tickets with a keytab, fall back to password.
-            ktOptions.add(KinitOption.USE_PASSWD);
-            String password = getPassword(principal);
-            ktOptions.add(KinitOption.USER_PASSWD, password);
-        }
-
-        TgtTicket tgt = null;
-        try {
-            tgt = krbClient.requestTgt(convertOptions(ktOptions));
-        } catch (KrbException e) {
-            System.err.println("Authentication failed: " + e.getMessage());
-            System.exit(1);
-        }
-
-        File ccacheFile;
-        if (ktOptions.contains(KinitOption.KRB5_CACHE)) {
-            String ccacheName = ktOptions.getStringOption(KinitOption.KRB5_CACHE);
-            ccacheFile = new File(ccacheName);
-        } else {
-            String ccacheName = getCcacheName(krbClient);
-            ccacheFile = new File(ccacheName);
-        }
-
-        try {
-            krbClient.storeTicket(tgt, ccacheFile);
-        } catch (KrbException e) {
-            System.err.println("Store ticket failed: " + e.getMessage());
-            System.exit(1);
-        }
-
-        System.out.println("Successfully requested and stored ticket in "
-            + ccacheFile.getAbsolutePath());
-
-        if (ktOptions.contains(KinitOption.SERVICE)) {
-            System.out.println("Use tgt to request a service ticket.");
-            String servicePrincipal = ktOptions.getStringOption(KinitOption.SERVICE);
-            SgtTicket sgtTicket;
-            try {
-                sgtTicket = krbClient.requestSgt(tgt, servicePrincipal);
-            } catch (KrbException e) {
-                System.err.println("kinit: " + e.getKrbErrorCode().getMessage());
-                return;
-            }
-
-            System.out.println(sgtTicket.getEncKdcRepPart().getSname().getName() + ": kvno = "
-                + sgtTicket.getTicket().getEncryptedEncPart().getKvno());
-        }
-    }
-
-    /**
-     * Init the client.
-     */
-    private static KrbClient getClient(File confDir) throws KrbException {
-        KrbClient krbClient;
-
-        if (confDir != null) {
-            krbClient = new KrbClient(confDir);
-        } else {
-            krbClient = new KrbClient();
-        }
-
-        krbClient.init();
-        return krbClient;
-    }
-
-    /**
-     * Get credential cache file name if not specified.
-     */
-    private static String getCcacheName(KrbClient krbClient) {
-        final String ccacheNameEnv = System.getenv("KRB5CCNAME");
-        final String ccacheNameConf = krbClient.getSetting().getKrbConfig().getString("default_ccache_name");
-        String ccacheName;
-        if (ccacheNameEnv != null) {
-            ccacheName = ccacheNameEnv;
-        } else if (ccacheNameConf != null) {
-            ccacheName = ccacheNameConf;
-        } else {
-            StringBuilder uid = new StringBuilder();
-            try {
-                //Get UID through "id -u" command
-                String command = "id -u";
-                Process child = Runtime.getRuntime().exec(command);
-                InputStream in = child.getInputStream();
-                int c;
-                while ((c = in.read()) != -1) {
-                    uid.append((char) c);
-                }
-                in.close();
-            } catch (IOException e) {
-                System.err.println("Failed to get UID.");
-                System.exit(1);
-            }
-            ccacheName = "krb5cc_" + uid.toString().trim();
-            ccacheName = SysUtil.getTempDir().toString() + "/" + ccacheName;
-        }
-
-        return ccacheName;
-    }
-
-    public static void main(String[] args) {
-        KOptions ktOptions = new KOptions();
-        KinitOption kto;
-        String principal = null;
-
-        int i = 0;
-        String opt, param, error;
-        while (i < args.length) {
-            error = null;
-
-            opt = args[i++];
-            if (opt.startsWith("-")) {
-                kto = KinitOption.fromName(opt);
-                if (kto == KinitOption.NONE) {
-                    error = "Invalid option:" + opt;
-                    System.err.println(error);
-                    break;
-                }
-            } else {
-                principal = opt;
-                kto = KinitOption.NONE;
-            }
-
-            if (kto != KinitOption.NONE && kto.getOptionInfo().getType() != KOptionType.NOV) {
-                // require a parameter
-                param = null;
-                if (i < args.length) {
-                    param = args[i++];
-                }
-                if (param != null) {
-                    KOptions.parseSetValue(kto.getOptionInfo(), param);
-                } else {
-                    error = "Option " + opt + " require a parameter";
-                }
-            }
-
-            if (error != null) {
-                printUsage(error);
-            }
-            if (kto != KinitOption.NONE) {
-                ktOptions.add(kto);
-            }
-        }
-
-        if (!ktOptions.contains(KinitOption.CONF_DIR)) {
-            printUsage("No conf dir given.");
-        }
-
-        if (principal == null) {
-            if (!ktOptions.contains(KinitOption.SERVICE) && !ktOptions.contains(KinitOption.KRB5_CACHE)) {
-                printUsage("No principal is specified");
-            } else if (ktOptions.contains(KinitOption.SERVICE) && !ktOptions.contains(KinitOption.KRB5_CACHE)) {
-                printKvnoUsage("No credential cache file given.");
-            }
-        }
-
-        requestTicket(principal, ktOptions);
-        System.exit(0);
-    }
-
-    /**
-     * Convert kinit tool options to KOptions.
-     * @param toolOptions the kinit tool options to convert
-     * @return the converted KOptions
-     */
-    static KOptions convertOptions(KOptions toolOptions) {
-        KOptions results = new KOptions();
-
-        for (KOption toolOpt : toolOptions.getOptions()) {
-            KOptionInfo kOptionInfo = toolOpt.getOptionInfo();
-            KOptionGroup group = kOptionInfo.getGroup();
-            KOption kOpt = null;
-
-            if (group == KrbOptionGroup.KRB) {
-                kOpt = KrbOption.fromOptionName(kOptionInfo.getName());
-            } else if (group == KrbOptionGroup.PKINIT) {
-                kOpt = PkinitOption.fromOptionName(kOptionInfo.getName());
-            } else if (group == KrbOptionGroup.TOKEN) {
-                kOpt = TokenOption.fromOptionName(kOptionInfo.getName());
-            } else if (group == KrbOptionGroup.KDC_FLAGS) {
-                kOpt = KrbKdcOption.fromOptionName(kOptionInfo.getName());
-            }
-            if (kOpt != null && kOpt.getOptionInfo() != KrbOption.NONE.getOptionInfo()) {
-                kOpt.getOptionInfo().setValue(toolOpt.getOptionInfo().getValue());
-                results.add(kOpt);
-            }
-        }
-
-        return results;
-    }
-}
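For reference, the core flow the removed KinitTool implemented can be sketched in a few lines using only the client calls visible in the diff above (new KrbClient(confDir), init, requestTgt, requestSgt, storeTicket). The principal, password, paths and the KrbOption constant names are illustrative assumptions, not part of this commit:

    import java.io.File;

    import org.apache.kerby.KOptions;
    import org.apache.kerby.kerberos.kerb.KrbException;
    import org.apache.kerby.kerberos.kerb.client.KrbClient;
    import org.apache.kerby.kerberos.kerb.client.KrbOption;
    import org.apache.kerby.kerberos.kerb.type.ticket.SgtTicket;
    import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;

    public class KinitFlowSketch {
        public static void main(String[] args) throws KrbException {
            // Initialize the client from a conf dir, as getClient(confDir) does above.
            KrbClient krbClient = new KrbClient(new File("/etc/has/conf"));
            krbClient.init();

            // Request a TGT. KinitTool builds these KOptions from the command line and
            // maps them via convertOptions(); the KrbOption constant names used here are
            // an assumption of this sketch.
            KOptions requestOptions = new KOptions();
            requestOptions.add(KrbOption.CLIENT_PRINCIPAL, "alice@EXAMPLE.COM");
            requestOptions.add(KrbOption.USER_PASSWD, "password");
            TgtTicket tgt = krbClient.requestTgt(requestOptions);

            // Store the TGT into a credential cache, then use it for a service ticket,
            // mirroring the requestTicket() method above.
            File ccache = new File("/tmp/krb5cc_sketch");
            krbClient.storeTicket(tgt, ccache);
            SgtTicket sgt = krbClient.requestSgt(tgt, "HTTP/host1@EXAMPLE.COM");
            krbClient.storeTicket(sgt, ccache);
        }
    }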

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistOption.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistOption.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistOption.java
deleted file mode 100644
index dab4d47..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistOption.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.klist;
-
-import org.apache.kerby.KOption;
-import org.apache.kerby.KOptionInfo;
-import org.apache.kerby.KOptionType;
-
-public enum KlistOption implements KOption {
-    NONE(null),
-    CREDENTIALS_CACHE(new KOptionInfo("-c", "specifies path of credentials cache",
-        KOptionType.STR)),
-    KEYTAB(new KOptionInfo("-k", "specifies keytab")),
-    DEFAULT_CLIENT_KEYTAB(new KOptionInfo("-i", "uses default client keytab if no name given")),
-    LIST_CREDENTIAL_CACHES(new KOptionInfo("-l", "list credential caches in collection")),
-    ALL_CREDENTIAL_CACHES(new KOptionInfo("-A", "shows content of all credential caches")),
-    ENCRYPTION_TYPE(new KOptionInfo("-e", "shows encryption type")),
-    KERBEROS_VERSION(new KOptionInfo("-V", "shows Kerberos version")),
-    AUTHORIZATION_DATA_TYPE(new KOptionInfo("-d", "shows the submitted authorization data type")),
-    CREDENTIALS_FLAGS(new KOptionInfo("-f", "show credential flags")),
-    EXIT_TGT_EXISTENCE(new KOptionInfo("-s", "sets exit status based on valid tgt existence")),
-    DISPL_ADDRESS_LIST(new KOptionInfo("-a", "displays the address list")),
-    NO_REVERSE_RESOLVE(new KOptionInfo("-n", "do not reverse resolve")),
-    SHOW_KTAB_ENTRY_TS(new KOptionInfo("-t", "shows keytab entry timestamps")),
-    SHOW_KTAB_ENTRY_KEY(new KOptionInfo("-K", "show keytab entry keys"));
-
-    private final KOptionInfo optionInfo;
-
-    KlistOption(KOptionInfo optionInfo) {
-        this.optionInfo = optionInfo;
-    }
-
-    @Override
-    public KOptionInfo getOptionInfo() {
-        return optionInfo;
-    }
-
-    public static KlistOption fromName(String name) {
-        if (name != null) {
-            for (KlistOption ko : values()) {
-                if (ko.optionInfo != null
-                        && ko.optionInfo.getName().equals(name)) {
-                    return ko;
-                }
-            }
-        }
-        return NONE;
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistTool.java
deleted file mode 100644
index 7143c04..0000000
--- a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistTool.java
+++ /dev/null
@@ -1,293 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.tool.client.klist;
-
-import org.apache.kerby.KOptionType;
-import org.apache.kerby.KOptions;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.ccache.Credential;
-import org.apache.kerby.kerberos.kerb.ccache.CredentialCache;
-import org.apache.kerby.kerberos.kerb.client.KrbClient;
-import org.apache.kerby.kerberos.kerb.keytab.Keytab;
-import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
-import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
-import org.apache.kerby.util.HexUtil;
-import org.apache.kerby.util.OSUtil;
-import org.apache.kerby.util.SysUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.List;
-
-/**
- * klist like tool
- *
- * Ref. MIT klist command tool usage.
- */
-public class KlistTool {
-    private static final Logger LOG = LoggerFactory.getLogger(KlistTool.class);
-
-    private static final String USAGE = (OSUtil.isWindows()
-        ? "Usage: bin\\klist.cmd" : "Usage: sh bin/klist.sh")
-            + " [-e] [-V] [[-c] [-l] [-A] [-d] [-f] [-s] "
-            + "[-a [-n]]] [-k [-t] [-K]] [name]\n"
-            + "\t-c specifies credentials cache\n"
-            + "\t-k specifies keytab\n"
-            + "\t   (Default is credentials cache)\n"
-            + "\t-i uses default client keytab if no name given\n"
-            + "\t-l lists credential caches in collection\n"
-            + "\t-A shows content of all credential caches\n"
-            + "\t-e shows the encryption type\n"
-            + "\t-V shows the Kerberos version and exits\n"
-            + "\toptions for credential caches:\n"
-            + "\t\t-d shows the submitted authorization data types\n"
-            + "\t\t-f shows credentials flags\n"
-            + "\t\t-s sets exit status based on valid tgt existence\n"
-            + "\t\t-a displays the address list\n"
-            + "\t\t\t-n do not reverse-resolve\n"
-            + "\toptions for keytabs:\n"
-            + "\t\t-t shows keytab entry timestamps\n"
-            + "\t\t-K shows keytab entry keys\n";
-
-    // option "-k" hava a optional parameter, "/etc/krb5.keytab" if not specified
-    private static String keytabFilePath = null;
-
-    private static void printUsage(String error) {
-        System.err.println(error + "\n");
-        System.err.println(USAGE);
-        System.exit(-1);
-    }
-
-    private static int printCredentialCacheInfo(KOptions klOptions) {
-        CredentialCache cc = new CredentialCache();
-        List<Credential> credentials;
-        InputStream cis = null;
-        String fileName;
-
-        if (!klOptions.contains(KlistOption.CREDENTIALS_CACHE)) {
-            fileName = getCcacheName();
-        } else {
-            fileName = klOptions.getStringOption(KlistOption.CREDENTIALS_CACHE);
-        }
-        try {
-            cis = Files.newInputStream(Paths.get(fileName));
-            cc.load(cis);
-        } catch (IOException e) {
-            LOG.error("Failed to open CredentialCache from file: " + fileName + ". " + e.toString());
-        } finally {
-            try {
-                if (cis != null) {
-                    cis.close();
-                }
-            } catch (IOException e) {
-                LOG.warn("Fail to close input stream. " + e);
-            }
-        }
-
-        if (cc != null) {
-            credentials = cc.getCredentials();
-
-            System.out.println("Ticket cache: " + fileName);
-            System.out.println("Default principal: " + cc.getPrimaryPrincipal().getName());
-
-            if (credentials.isEmpty()) {
-                System.out.println("No credential has been cached.");
-            } else {
-                DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-
-                System.out.println("Valid starting\t\tExpires\t\t\tService principal");
-
-                for (Credential crd : credentials) {
-                    System.out.println(df.format(crd.getStartTime().getTime()) + "\t"
-                        + df.format(crd.getEndTime().getTime()) + "\t"
-                        + crd.getServerName() + "\n"
-                        + "\t" + "renew until" + "\t" + df.format(crd.getRenewTill().getTime()));
-                }
-            }
-        }
-
-        return 0;
-    }
-
-    /**
-     * Get credential cache file name if not specified.
-     */
-    private static String getCcacheName() {
-        String ccacheName;
-        String ccacheNameEnv = System.getenv("KRB5CCNAME");
-        String ccacheNameConf = null;
-        File confDir = new File("/etc");
-        try {
-            KrbClient krbClient = new KrbClient(confDir);
-            ccacheNameConf = krbClient.getSetting().getKrbConfig().getString("default_ccache_name");
-        } catch (KrbException e) {
-            System.err.println("Create krbClient failed: " + e.getMessage());
-            System.exit(1);
-        }
-        if (ccacheNameEnv != null) {
-            ccacheName = ccacheNameEnv;
-        } else if (ccacheNameConf != null) {
-            ccacheName = ccacheNameConf;
-        } else {
-            StringBuilder uid = new StringBuilder();
-            try {
-                //Get UID through "id -u" command
-                String command = "id -u";
-                Process child = Runtime.getRuntime().exec(command);
-                InputStream in = child.getInputStream();
-                int c;
-                while ((c = in.read()) != -1) {
-                    uid.append((char) c);
-                }
-                in.close();
-            } catch (IOException e) {
-                System.err.println("Failed to get UID.");
-                System.exit(1);
-            }
-            ccacheName = "krb5cc_" + uid.toString().trim();
-            ccacheName = SysUtil.getTempDir().toString() + "/" + ccacheName;
-        }
-
-        return ccacheName;
-    }
-
-    private static int printKeytabInfo(KOptions klOptions) {
-        String[] header = new String[4];
-        header[0] = "KVNO Principal\n"
-                + "---- --------------------------------------------------------------------------";
-        header[1] = header[0];
-        header[2] = "KVNO Timestamp           Principal\n"
-                + "---- ------------------- ------------------------------------------------------";
-        header[3] = header[2];
-        int outputIndex = 0;
-        if (klOptions.contains(KlistOption.SHOW_KTAB_ENTRY_TS)) {
-            outputIndex |= 2;
-        }
-        if (klOptions.contains(KlistOption.SHOW_KTAB_ENTRY_KEY)) {
-            outputIndex |= 1;
-        }
-        System.out.println("Keytab name: FILE:" + keytabFilePath);
-        try {
-            File keytabFile = new File(keytabFilePath);
-            if (!keytabFile.exists()) {
-                System.out.println("klist: Key table file '" + keytabFilePath + "' not found. ");
-                return 0;
-            }
-            System.out.println(header[outputIndex]);
-            SimpleDateFormat format = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss");
-            Keytab keytab = Keytab.loadKeytab(keytabFile);
-            List<PrincipalName> principals = keytab.getPrincipals();
-            for (PrincipalName principal : principals) {
-                List<KeytabEntry> keytabEntries = keytab.getKeytabEntries(principal);
-                for (KeytabEntry entry : keytabEntries) {
-                    StringBuilder sb = new StringBuilder();
-                    sb.append(String.format("%-4d ", entry.getKvno()));
-                    if ((outputIndex & 2) != 0) {
-                        Date date = new Date(entry.getTimestamp().getTime());
-                        sb.append(format.format(date));
-                        sb.append(' ');
-                    }
-                    sb.append(String.format("%s ", principal.getName()));
-                    if ((outputIndex & 1) != 0) {
-                        sb.append("(0x");
-                        sb.append(HexUtil.bytesToHex(entry.getKey().getKeyData()));
-                        sb.append(")");
-                    }
-                    System.out.println(sb);
-                }
-            }
-
-        } catch (IOException e) {
-            System.err.println("klist: Error while scan key table file '" + keytabFilePath + "'");
-        }
-        return 0;
-    }
-
-    private static int printInfo(KOptions klOptions) {
-        if (klOptions.contains(KlistOption.KEYTAB)) {
-            return printKeytabInfo(klOptions);
-        }
-        return printCredentialCacheInfo(klOptions);
-    }
-
-    public static void main(String[] args) throws Exception {
-        KOptions klOptions = new KOptions();
-        KlistOption klopt;
-        // String name = null;
-
-        int i = 0;
-        String opt, value, error;
-        while (i < args.length) {
-            error = null;
-            opt = args[i++];
-
-            if (opt.startsWith("-")) {
-                klopt = KlistOption.fromName(opt);
-                if (klopt == KlistOption.NONE) {
-                    error = "Invalid option:" + opt;
-                }
-            } else {
-                if (keytabFilePath == null && klOptions.contains(KlistOption.KEYTAB)) {
-                    keytabFilePath = opt;
-                }
-                break;
-            }
-
-            if (error == null && klopt.getOptionInfo().getType() != KOptionType.NOV) {
-                //needs value for this parameter
-                value = null;
-                if (i < args.length) {
-                    value = args[i++];
-                }
-                if (value != null) {
-                    KOptions.parseSetValue(klopt.getOptionInfo(), value);
-                } else {
-                    error = "Option" + klopt + "requires a following value";
-                }
-            }
-
-            if (error != null) {
-                printUsage(error);
-            }
-
-            klOptions.add(klopt);
-            if (klOptions.contains(KlistOption.KEYTAB)
-                && klOptions.contains(KlistOption.CREDENTIALS_CACHE)) {
-                error = "Can not use '-c' and '-k' at the same time ";
-                printUsage(error);
-            }
-        }
-
-        if (keytabFilePath == null) {
-            keytabFilePath = "/etc/krb5.keytab";
-        }
-
-        int errNo = KlistTool.printInfo(klOptions);
-        System.exit(errNo);
-    }
-}
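The cache-listing path above can likewise be exercised directly. A minimal sketch, assuming only the calls shown in printCredentialCacheInfo (CredentialCache.load, getPrimaryPrincipal, getCredentials) and with the cache path as an illustrative placeholder:

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    import org.apache.kerby.kerberos.kerb.ccache.Credential;
    import org.apache.kerby.kerberos.kerb.ccache.CredentialCache;

    public class CcacheListSketch {
        public static void main(String[] args) throws Exception {
            // Load a credential cache file, as printCredentialCacheInfo() does above.
            CredentialCache cc = new CredentialCache();
            try (InputStream in = Files.newInputStream(Paths.get("/tmp/krb5cc_1000"))) {
                cc.load(in);
            }

            // Print the default principal and each cached service ticket's validity.
            System.out.println("Default principal: " + cc.getPrimaryPrincipal().getName());
            for (Credential crd : cc.getCredentials()) {
                System.out.println(crd.getServerName()
                    + " valid " + crd.getStartTime().getTime()
                    + " .. " + crd.getEndTime().getTime());
            }
        }
    }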

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/HadminRemoteTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/HadminRemoteTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/HadminRemoteTool.java
new file mode 100644
index 0000000..71300c0
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/HadminRemoteTool.java
@@ -0,0 +1,164 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.hadmin.remote;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.has.client.HasAuthAdminClient;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.util.HasUtil;
+import org.apache.kerby.has.tool.client.hadmin.remote.cmd.HadminRemoteCmd;
+import org.apache.kerby.has.tool.client.hadmin.remote.cmd.HasRemoteAddPrincipalCmd;
+import org.apache.kerby.has.tool.client.hadmin.remote.cmd.HasRemoteCreatePrincipalsCmd;
+import org.apache.kerby.has.tool.client.hadmin.remote.cmd.HasRemoteDeletePrincipalCmd;
+import org.apache.kerby.has.tool.client.hadmin.remote.cmd.HasRemoteDisableConfCmd;
+import org.apache.kerby.has.tool.client.hadmin.remote.cmd.HasRemoteEnableConfCmd;
+import org.apache.kerby.has.tool.client.hadmin.remote.cmd.HasRemoteExportKeytabsCmd;
+import org.apache.kerby.has.tool.client.hadmin.remote.cmd.HasRemoteGetHostRolesCmd;
+import org.apache.kerby.has.tool.client.hadmin.remote.cmd.HasRemoteGetPrincipalsCmd;
+import org.apache.kerby.has.tool.client.hadmin.remote.cmd.HasRemoteRenamePrincipalCmd;
+import org.apache.kerby.util.OSUtil;
+
+import java.io.File;
+import java.util.Scanner;
+
+public class HadminRemoteTool {
+
+    private static final String PROMPT = HadminRemoteTool.class.getSimpleName() + ".remote";
+    private static final String USAGE = (OSUtil.isWindows()
+        ? "Usage: bin\\hadmin-remote.cmd" : "Usage: sh bin/hadmin-remote.sh")
+        + " <conf-file>\n"
+        + "\tExample:\n"
+        + "\t\t"
+        + (OSUtil.isWindows()
+        ? "bin\\hadmin-remote.cmd" : "sh bin/hadmin-remote.sh")
+        + " conf\n";
+
+    private static final String LEGAL_COMMANDS = "Available commands are: "
+        + "\n"
+        + "add_principal, addprinc\n"
+        + "                         Add principal\n"
+        + "delete_principal, delprinc\n"
+        + "                         Delete principal\n"
+        + "rename_principal, renprinc\n"
+        + "                         Rename principal\n"
+        + "list_principals, listprincs\n"
+        + "                         List principals\n"
+        + "get_hostroles, hostroles\n"
+        + "                         Get hostRoles\n"
+        + "export_keytabs, expkeytabs\n"
+        + "                         Export keytabs\n"
+        + "create_principals, creprincs\n"
+        + "                         Create principals\n"
+        + "enable_configure, enable\n"
+        + "                         Enable configure\n"
+        + "disable_configure, disable\n"
+        + "                         Disable configure\n";
+
+    public static void main(String[] args) {
+        HasAdminClient hadmin;
+        HasAuthAdminClient authHasAdminClient = null;
+
+        if (args.length < 1) {
+            System.err.println(USAGE);
+            System.exit(1);
+        }
+
+        String confDirPath = args[0];
+        File confFile = new File(confDirPath, "hadmin.conf");
+        HasConfig hasConfig;
+        try {
+            hasConfig = HasUtil.getHasConfig(confFile);
+        } catch (HasException e) {
+            System.err.println(e.getMessage());
+            return;
+        }
+
+        hadmin = new HasAdminClient(hasConfig);
+
+        if (hasConfig.getFilterAuthType().equals("kerberos")) {
+            authHasAdminClient = new HasAuthAdminClient(hasConfig);
+        }
+
+        System.out.println("enter \"cmd\" to see legal commands.");
+        System.out.print(PROMPT + ": ");
+
+        try (Scanner scanner = new Scanner(System.in, "UTF-8")) {
+            String input = scanner.nextLine();
+
+            while (!(input.equals("quit") || input.equals("exit") || input.equals("q"))) {
+                try {
+                    execute(hadmin, authHasAdminClient, input);
+                } catch (HasException e) {
+                    System.err.println(e.getMessage());
+                }
+                System.out.print(PROMPT + ": ");
+                input = scanner.nextLine();
+            }
+        }
+    }
+
+    private static void execute(HasAdminClient hadmin, HasAuthAdminClient hasAuthAdminClient,
+                               String input) throws HasException {
+        input = input.trim();
+        if (input.startsWith("cmd")) {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+        HadminRemoteCmd executor;
+
+        String[] items = input.split("\\s+");
+        String cmd = items[0];
+
+        if (cmd.equals("add_principal")
+            || cmd.equals("addprinc")) {
+            executor = new HasRemoteAddPrincipalCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("delete_principal")
+            || cmd.equals("delprinc")) {
+            executor = new HasRemoteDeletePrincipalCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("rename_principal")
+            || cmd.equals("renprinc")) {
+            executor = new HasRemoteRenamePrincipalCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("list_principals")
+            || cmd.equals("listprincs")) {
+            executor = new HasRemoteGetPrincipalsCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("get_hostroles")
+            || cmd.equals("hostroles")) {
+            executor = new HasRemoteGetHostRolesCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("create_principals")
+            || cmd.equals("creprincs")) {
+            executor = new HasRemoteCreatePrincipalsCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("export_keytabs")
+            || cmd.equals("expkeytabs")) {
+            executor = new HasRemoteExportKeytabsCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("enable_configure")
+            || cmd.equals("enable")) {
+            executor = new HasRemoteEnableConfCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("disable_configure")
+            || cmd.equals("disable")) {
+            executor = new HasRemoteDisableConfCmd(hadmin, hasAuthAdminClient);
+        } else {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+        executor.execute(items);
+    }
+
+}
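As an aside, the admin calls this interactive tool dispatches to can also be driven programmatically. A minimal sketch, assuming the classes wired up above (HasUtil.getHasConfig, HasAdminClient, HasAuthAdminClient) and the addPrincipal call used by the add_principal command shown further down; the conf path, principal and password are placeholders:

    import java.io.File;

    import org.apache.kerby.has.client.HasAdminClient;
    import org.apache.kerby.has.client.HasAuthAdminClient;
    import org.apache.kerby.has.common.HasConfig;
    import org.apache.kerby.has.common.HasException;
    import org.apache.kerby.has.common.util.HasUtil;

    public class HadminRemoteSketch {
        public static void main(String[] args) throws HasException {
            // Load hadmin.conf from a conf dir, as HadminRemoteTool.main() does above.
            HasConfig hasConfig = HasUtil.getHasConfig(new File("conf", "hadmin.conf"));

            // Prefer the authenticated client when the server filter uses Kerberos.
            HasAdminClient admin = new HasAdminClient(hasConfig);
            if ("kerberos".equals(hasConfig.getFilterAuthType())) {
                admin = new HasAuthAdminClient(hasConfig);
            }

            // Same call the add_principal command issues for the "-pw" option.
            admin.addPrincipal("alice", "mypassword");
        }
    }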

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java
new file mode 100644
index 0000000..d94c3d6
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java
@@ -0,0 +1,49 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.has.client.HasAuthAdminClient;
+import org.apache.kerby.has.common.HasException;
+
+public abstract class HadminRemoteCmd {
+
+    private HasAdminClient hadmin;
+    private HasAuthAdminClient authHadmin;
+
+    public HadminRemoteCmd(HasAdminClient hadmin, HasAuthAdminClient authHadminClient) {
+        this.hadmin = hadmin;
+        this.authHadmin = authHadminClient;
+    }
+
+    protected HasAdminClient getHadmin() {
+        return hadmin;
+    }
+
+    protected HasAuthAdminClient getAuthHadmin() {
+        return authHadmin;
+    }
+
+    /**
+     * Execute the hadmin cmd.
+     * @param input Input cmd to execute
+     */
+    public abstract void execute(String[] input) throws HasException;
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java
new file mode 100644
index 0000000..746d497
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java
@@ -0,0 +1,70 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.has.client.HasAuthAdminClient;
+import org.apache.kerby.has.common.HasException;
+
+/**
+ * Remote add principal cmd
+ */
+public class HasRemoteAddPrincipalCmd extends HadminRemoteCmd {
+
+    public static final String USAGE = "Usage: add_principal [options] <principal-name>\n"
+        + "\toptions are:\n"
+        + "\t\t[-randkey]\n"
+        + "\t\t[-pw password]"
+        + "\tExample:\n"
+        + "\t\tadd_principal -pw mypassword alice\n";
+
+    public HasRemoteAddPrincipalCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        String clientPrincipal = items[items.length - 1];
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        if (!items[1].startsWith("-")) {
+            hasAdminClient.addPrincipal(clientPrincipal);
+        } else if (items[1].startsWith("-randkey")) {
+            hasAdminClient.addPrincipal(clientPrincipal);
+        } else if (items[1].startsWith("-pw")) {
+            String password = items[2];
+            hasAdminClient.addPrincipal(clientPrincipal, password);
+        } else {
+            System.err.println("add_principal cmd format error.");
+            System.err.println(USAGE);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java
new file mode 100644
index 0000000..95208be
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java
@@ -0,0 +1,82 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.has.client.HasAuthAdminClient;
+import org.apache.kerby.has.common.HasException;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+
+public class HasRemoteCreatePrincipalsCmd extends HadminRemoteCmd {
+    private static final String USAGE = "\nUsage: create_principals [hostRoles-file]\n"
+            + "\t'hostRoles-file' is a file with a hostRoles json string like:\n"
+            + "\t\t{HOSTS: [ {\"name\":\"host1\",\"hostRoles\":\"HDFS\"}, "
+            + "{\"name\":\"host2\",\"hostRoles\":\"HDFS,HBASE\"} ] }\n"
+            + "\tExample:\n"
+            + "\t\tcreate_principals hostroles.txt\n";
+
+    public HasRemoteCreatePrincipalsCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        //String param = items[0];
+        if (items.length != 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        File hostRoles = new File(items[1]);
+        if (!hostRoles.exists()) {
+            System.err.println("HostRoles file is not exists.");
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        BufferedReader reader;
+        try {
+            reader = new BufferedReader(new FileReader(hostRoles));
+        } catch (FileNotFoundException e) {
+            throw new HasException("File not exist", e);
+        }
+        StringBuilder sb = new StringBuilder();
+        String tempString;
+        try {
+            while ((tempString = reader.readLine()) != null) {
+                sb.append(tempString);
+            }
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when read line. ", e);
+        }
+        hasAdminClient.requestCreatePrincipals(sb.toString());
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java
new file mode 100644
index 0000000..4ca2f84
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java
@@ -0,0 +1,89 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.has.client.HasAuthAdminClient;
+import org.apache.kerby.has.common.HasException;
+
+import java.io.Console;
+import java.util.Scanner;
+
+/**
+ * Remote delete principal cmd
+ */
+public class HasRemoteDeletePrincipalCmd extends HadminRemoteCmd {
+
+    public static final String USAGE = "Usage: delete_principal <principal-name>\n"
+        + "\tExample:\n"
+        + "\t\tdelete_principal alice\n";
+
+    public HasRemoteDeletePrincipalCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        String principal = items[items.length - 1];
+        String reply;
+        Console console = System.console();
+        String prompt = "Are you sure to delete the principal? (yes/no, YES/NO, y/n, Y/N) ";
+        if (console == null) {
+            System.out.println("Couldn't get Console instance, "
+                + "maybe you're running this from within an IDE. "
+                + "Use scanner to read password.");
+            Scanner scanner = new Scanner(System.in, "UTF-8");
+            reply = getReply(scanner, prompt);
+        } else {
+            reply = getReply(console, prompt);
+        }
+        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
+            hasAdminClient.deletePrincipal(principal);
+        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
+            System.out.println("Principal \"" + principal + "\"  not deleted.");
+        } else {
+            System.err.println("Unknown request, fail to delete the principal.");
+            System.err.println(USAGE);
+        }
+    }
+
+    private String getReply(Scanner scanner, String prompt) {
+        System.out.println(prompt);
+        return scanner.nextLine().trim();
+    }
+
+    private String getReply(Console console, String prompt) {
+        console.printf(prompt);
+        String line = console.readLine();
+        return line;
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java
new file mode 100644
index 0000000..05f0271
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java
@@ -0,0 +1,49 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.has.client.HasAuthAdminClient;
+import org.apache.kerby.has.common.HasException;
+
+/**
+ * Remote disable configure cmd
+ */
+public class HasRemoteDisableConfCmd extends HadminRemoteCmd {
+
+    public static final String USAGE = "Usage: disable_configure\n"
+            + "\tExample:\n"
+            + "\t\tdisable\n";
+
+    public HasRemoteDisableConfCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+        hasAdminClient.setEnableOfConf("false");
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java
new file mode 100644
index 0000000..cb1ecfa
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java
@@ -0,0 +1,49 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.has.client.HasAuthAdminClient;
+import org.apache.kerby.has.common.HasException;
+
+/**
+ * Remote enable configure cmd
+ */
+public class HasRemoteEnableConfCmd extends HadminRemoteCmd {
+
+    public static final String USAGE = "Usage: enable_configure\n"
+            + "\tExample:\n"
+            + "\t\tenable\n";
+
+    public HasRemoteEnableConfCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+        hasAdminClient.setEnableOfConf("true");
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java
new file mode 100644
index 0000000..ba11b3b
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java
@@ -0,0 +1,58 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.has.client.HasAuthAdminClient;
+import org.apache.kerby.has.common.HasException;
+
+public class HasRemoteExportKeytabsCmd extends HadminRemoteCmd {
+    private static final String USAGE = "\nUsage: export_keytabs <host> [role]\n"
+            + "\tExample:\n"
+            + "\t\texport_keytabs host1 HDFS\n";
+
+    public HasRemoteExportKeytabsCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        //TODO add save path option
+        //String param = items[0];
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        String host = items[1];
+        String role = "";
+        if (items.length >= 3) {
+            role = items[2];
+        }
+        hasAdminClient.getKeytabByHostAndRole(host, role);
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java
new file mode 100644
index 0000000..255ee87
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java
@@ -0,0 +1,68 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.has.client.HasAuthAdminClient;
+import org.apache.kerby.has.common.HasException;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+
+public class HasRemoteGetHostRolesCmd extends HadminRemoteCmd {
+    private static final String USAGE = "Usage: get_hostroles\n"
+            + "\tExample:\n"
+            + "\t\tget_hostroles\n";
+
+    public HasRemoteGetHostRolesCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] input) throws HasException {
+        HasAdminClient hasAdminClient = getHadmin();
+        String result = hasAdminClient.getHostRoles();
+
+        if (result != null) {
+            try {
+                JSONArray hostRoles = new JSONArray(result);
+                for (int i = 0; i < hostRoles.length(); i++) {
+                    JSONObject hostRole = hostRoles.getJSONObject(i);
+                    System.out.print("\tHostRole: " + hostRole.getString("HostRole")
+                            + ", PrincipalNames: ");
+                    JSONArray principalNames = hostRole.getJSONArray("PrincipalNames");
+                    for (int j = 0; j < principalNames.length(); j++) {
+                        System.out.print(principalNames.getString(j));
+                        if (j == principalNames.length() - 1) {
+                            System.out.println();
+                        } else {
+                            System.out.print(", ");
+                        }
+                    }
+                }
+            } catch (JSONException e) {
+                throw new HasException("Errors occurred when getting the host roles.", e);
+            }
+        } else {
+            throw new HasException("Could not get hostRoles.");
+        }
+    }
+
+}

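For reference, a minimal sketch of the JSON payload this command expects from getHostRoles(),
built with the same Jettison types the parser uses. The field names "HostRole" and
"PrincipalNames" come from the parsing code above; the role and principal values are
illustrative placeholders only.

    import org.codehaus.jettison.json.JSONArray;
    import org.codehaus.jettison.json.JSONException;
    import org.codehaus.jettison.json.JSONObject;

    public class HostRolesPayloadSketch {
        public static void main(String[] args) throws JSONException {
            // One host-role entry shaped the way HasRemoteGetHostRolesCmd reads it.
            JSONArray principalNames = new JSONArray();
            principalNames.put("nn/host1@EXAMPLE.COM"); // placeholder principal
            principalNames.put("dn/host1@EXAMPLE.COM"); // placeholder principal

            JSONObject hostRole = new JSONObject();
            hostRole.put("HostRole", "HDFS");           // placeholder role
            hostRole.put("PrincipalNames", principalNames);

            JSONArray hostRoles = new JSONArray();
            hostRoles.put(hostRole);
            System.out.println(hostRoles.toString());
        }
    }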
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java
new file mode 100644
index 0000000..6c98d38
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java
@@ -0,0 +1,76 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.has.client.HasAuthAdminClient;
+import org.apache.kerby.has.common.HasException;
+
+import java.util.List;
+
+public class HasRemoteGetPrincipalsCmd extends HadminRemoteCmd {
+    private static final String USAGE = "Usage: list_principals [expression]\n"
+            + "\t'expression' is a shell-style glob expression that can contain the wild-card characters ?, *, and []."
+            + "\tExample:\n"
+            + "\t\tlist_principals [expression]\n";
+
+    public HasRemoteGetPrincipalsCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length > 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        List<String> principalLists = null;
+
+        if (items.length == 1) {
+            try {
+                principalLists = hasAdminClient.getPrincipals();
+            } catch (Exception e) {
+                System.err.println("Errors occurred when getting the principals. " + e.getMessage());
+            }
+        } else {
+            //have expression
+            String exp = items[1];
+            principalLists = hasAdminClient.getPrincipals(exp);
+        }
+
+        if (principalLists == null || principalLists.isEmpty()
+                || principalLists.size() == 1 && principalLists.get(0).isEmpty()) {
+            return;
+        } else {
+            System.out.println("Principals are listed:");
+            for (int i = 0; i < principalLists.size(); i++) {
+                System.out.println(principalLists.get(i));
+            }
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java
new file mode 100644
index 0000000..7125da6
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/kerby/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java
@@ -0,0 +1,91 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.kerby.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.kerby.has.client.HasAdminClient;
+import org.apache.kerby.has.client.HasAuthAdminClient;
+import org.apache.kerby.has.common.HasException;
+
+import java.io.Console;
+import java.util.Scanner;
+
+/**
+ * Remote rename principal cmd
+ */
+public class HasRemoteRenamePrincipalCmd extends HadminRemoteCmd {
+    public static final String USAGE = "Usage: rename_principal <old_principal_name>"
+        + " <new_principal_name>\n"
+        + "\tExample:\n"
+        + "\t\trename_principal alice bob\n";
+
+    public HasRemoteRenamePrincipalCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 3) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        String oldPrincipalName = items[items.length - 2];
+        String newPrincipalName = items[items.length - 1];
+
+        String reply;
+        Console console = System.console();
+        String prompt = "Are you sure to rename the principal? (yes/no, YES/NO, y/n, Y/N) ";
+        if (console == null) {
+            System.out.println("Couldn't get Console instance, "
+                + "maybe you're running this from within an IDE. "
+                + "Use scanner to read password.");
+            Scanner scanner = new Scanner(System.in, "UTF-8");
+            reply = getReply(scanner, prompt);
+        } else {
+            reply = getReply(console, prompt);
+        }
+        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
+            hasAdminClient.renamePrincipal(oldPrincipalName, newPrincipalName);
+        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
+            System.out.println("Principal \"" + oldPrincipalName + "\"  not renamed.");
+        } else {
+            System.err.println("Unknown request, fail to rename the principal.");
+            System.err.println(USAGE);
+        }
+    }
+
+    private String getReply(Scanner scanner, String prompt) {
+        System.out.println(prompt);
+        return scanner.nextLine().trim();
+    }
+
+    private String getReply(Console console, String prompt) {
+        console.printf(prompt);
+        String line = console.readLine();
+        return line;
+    }
+}


[13/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/kerby/has/client/HasClient.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/kerby/has/client/HasClient.java b/has/has-client/src/main/java/org/apache/kerby/has/client/HasClient.java
new file mode 100755
index 0000000..759c922
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/kerby/has/client/HasClient.java
@@ -0,0 +1,677 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.client;
+
+import com.sun.jersey.api.client.Client;
+import com.sun.jersey.api.client.ClientHandlerException;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.text.CharacterPredicates;
+import org.apache.commons.text.RandomStringGenerator;
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.has.common.HasConfigKey;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.has.common.ssl.SSLFactory;
+import org.apache.kerby.has.common.util.HasUtil;
+import org.apache.kerby.has.common.util.URLConnectionFactory;
+import org.apache.kerby.kerberos.kerb.KrbCodec;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.KrbRuntime;
+import org.apache.kerby.kerberos.kerb.crypto.EncryptionHandler;
+import org.apache.kerby.kerberos.kerb.provider.TokenEncoder;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptedData;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.KeyUsage;
+import org.apache.kerby.kerberos.kerb.type.base.KrbError;
+import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;
+import org.apache.kerby.kerberos.kerb.type.base.KrbMessageType;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
+import org.apache.kerby.kerberos.kerb.type.kdc.EncAsRepPart;
+import org.apache.kerby.kerberos.kerb.type.kdc.EncKdcRepPart;
+import org.apache.kerby.kerberos.kerb.type.kdc.KdcRep;
+import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;
+import org.apache.kerby.util.IOUtil;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.ProtocolException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.nio.ByteBuffer;
+import java.security.GeneralSecurityException;
+import java.security.KeyStore;
+import java.security.PublicKey;
+import java.security.cert.CertificateException;
+import java.security.cert.CertificateFactory;
+import java.security.cert.X509Certificate;
+import java.util.Date;
+
+/**
+ * HAS client
+ */
+public class HasClient {
+
+    public static final Logger LOG = LoggerFactory.getLogger(HasClient.class);
+
+    public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf";
+    public static final String HAS_HTTP_PORT_DEFAULT = "9870";
+    public static final String HAS_CONFIG_DEFAULT = "/etc/has/has-client.conf";
+    public static final String CA_ROOT_DEFAULT = "/etc/has/ca-root.pem";
+
+    private String hadoopSecurityHas = null;
+    private String type;
+    private File clientConfigFolder;
+
+
+    public HasClient() { }
+
+    /**
+     * Create an instance of the HasClient.
+     *
+     * @param hadoopSecurityHas the has config
+     */
+    public HasClient(String hadoopSecurityHas) {
+        this.hadoopSecurityHas = hadoopSecurityHas;
+    }
+
+
+    public TgtTicket requestTgt() throws HasException {
+        HasConfig config;
+        if (hadoopSecurityHas == null) {
+            String hasClientConf = System.getenv("HAS_CLIENT_CONF");
+            if (hasClientConf == null) {
+                hasClientConf = HAS_CONFIG_DEFAULT;
+            }
+            LOG.debug("has-client conf path: " + hasClientConf);
+            File confFile = new File(hasClientConf);
+            if (!confFile.exists()) {
+                throw new HasException("The HAS client config file: " + hasClientConf
+                    + " does not exist.");
+            }
+            try {
+                config = HasUtil.getHasConfig(confFile);
+            } catch (HasException e) {
+                LOG.error("Failed to get has client config: " + e.getMessage());
+                throw new HasException("Failed to get has client config: " + e);
+            }
+        } else {
+            config = new HasConfig();
+            String[] urls = hadoopSecurityHas.split(";");
+            String host = "";
+            int port = 0;
+            try {
+                for (String url : urls) {
+                    URI uri = new URI(url.trim());
+
+                    // parse host
+                    host = host + uri.getHost() + ",";
+
+                    // parse port
+                    if (port == 0) {
+                        port = uri.getPort();
+                    } else {
+                        if (port != uri.getPort()) {
+                            throw new HasException("Invalid port: not even.");
+                        }
+                    }
+
+                    // We will get the auth type from env first
+                    type = System.getenv("auth_type");
+                    // parse the auth type from the query string
+                    if (type == null) {
+                        String[] strs = uri.getQuery().split("=");
+                        if (strs[0].equals("auth_type")) {
+                            type = strs[1];
+                        } else {
+                            LOG.warn("No auth type in conf.");
+                        }
+                    }
+                }
+                if (host == null || port == 0) {
+                    throw new HasException("Failed to parse the host and port from the config.");
+                } else {
+                    host = host.substring(0, host.length() - 1);
+                    config.setString(HasConfigKey.HTTPS_HOST, host);
+                    config.setInt(HasConfigKey.HTTPS_PORT, port);
+                    config.setString(HasConfigKey.AUTH_TYPE, type);
+                }
+            } catch (URISyntaxException e) {
+                LOG.error("Errors occurred when getting web url. " + e.getMessage());
+                throw new HasException(
+                    "Errors occurred when getting web url. " + e.getMessage());
+            }
+        }
+        if (config == null) {
+            throw new HasException("Failed to get HAS client config.");
+        }
+        clientConfigFolder = new File("/etc/has/" + config.getHttpsHost());
+        if (!clientConfigFolder.exists()) {
+            clientConfigFolder.mkdirs();
+        }
+
+        // get and set ssl-client/trustStore first
+        String sslClientConfPath = clientConfigFolder + "/ssl-client.conf";
+        loadSslClientConf(config, sslClientConfPath);
+        config.setString(HasConfigKey.SSL_CLIENT_CONF, sslClientConfPath);
+
+        createKrb5Conf(config);
+
+        HasClientPlugin plugin;
+        try {
+            plugin = getClientTokenPlugin(config);
+        } catch (HasException e) {
+            LOG.error("Failed to get client token plugin from config: " + e.getMessage());
+            throw new HasException(
+                "Failed to get client token plugin from config: " + e.getMessage());
+        }
+        AuthToken authToken;
+        try {
+            authToken = plugin.login(config);
+        } catch (HasLoginException e) {
+            LOG.error("Plugin login failed: " + e.getMessage());
+            throw new HasException(
+                "Plugin login failed: " + e.getMessage());
+        }
+        type = plugin.getLoginType();
+
+        LOG.info("The plugin type is: " + type);
+
+        return requestTgt(authToken, type, config);
+    }
+
+    private void createKrb5Conf(HasConfig config) throws HasException {
+        HasAdminClient hasAdminClient = new HasAdminClient(config);
+        File krb5Conf = new File(clientConfigFolder + "/krb5.conf");
+        if (!krb5Conf.exists()) {
+            String content = hasAdminClient.getKrb5conf();
+            if (content == null) {
+                LOG.error("Failed to get krb5.conf.");
+                throw new HasException("Failed to get krb5.conf.");
+            }
+            try {
+                PrintStream ps = new PrintStream(new FileOutputStream(krb5Conf));
+                ps.println(content);
+                LOG.info("krb5.conf has saved in : " + krb5Conf.getAbsolutePath());
+            } catch (FileNotFoundException e) {
+                LOG.error(e.getMessage());
+                throw new HasException(e);
+            }
+        }
+        System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5Conf.getAbsolutePath());
+    }
+
+
+    private HasClientPlugin getClientTokenPlugin(HasConfig config) throws HasException {
+        String pluginName = config.getPluginName();
+        LOG.info("The plugin name getting from config is: " + pluginName);
+        HasClientPlugin clientPlugin;
+        if (pluginName != null) {
+            clientPlugin = HasClientPluginRegistry.createPlugin(pluginName);
+        } else {
+            throw new HasException("Please set the plugin name in has client conf");
+        }
+        if (clientPlugin == null) {
+            throw new HasException("Failed to create client plugin: " + pluginName);
+        }
+        LOG.info("The plugin class is: " + clientPlugin);
+
+        return clientPlugin;
+    }
+
+    /**
+     * Request a TGT with the user token, plugin type and HAS config.
+     * @param authToken the auth token produced by the client plugin login
+     * @param type the plugin login type
+     * @param config the HAS client config
+     * @return TGT
+     * @throws HasException e
+     */
+    public TgtTicket requestTgt(AuthToken authToken, String type, HasConfig config)
+        throws HasException {
+        TokenEncoder tokenEncoder = KrbRuntime.getTokenProvider("JWT").createTokenEncoder();
+
+        String tokenString;
+        try {
+            tokenString = tokenEncoder.encodeAsString(authToken);
+        } catch (KrbException e) {
+            LOG.debug("Failed to decode the auth token.");
+            throw new HasException("Failed to decode the auth token." + e.getMessage());
+        }
+
+        JSONObject json = null;
+        int responseStatus = 0;
+        boolean success = false;
+        if ((config.getHttpsPort() != null) && (config.getHttpsHost() != null)) {
+            String sslClientConfPath = clientConfigFolder + "/ssl-client.conf";
+            config.setString(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
+            config.setString(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConfPath);
+            config.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, false);
+
+            URLConnectionFactory connectionFactory = URLConnectionFactory
+                .newDefaultURLConnectionFactory(config);
+
+            URL url;
+            String[] hosts = config.getHttpsHost().split(",");
+            for (String host : hosts) {
+                try {
+                    url = new URL("https://" + host.trim() + ":" + config.getHttpsPort()
+                        + "/has/v1?type=" + type + "&authToken=" + tokenString);
+                } catch (MalformedURLException e) {
+                    LOG.warn("Failed to get url. " + e.toString());
+                    continue;
+                }
+                HttpURLConnection conn;
+                try {
+                    conn = (HttpURLConnection) connectionFactory.openConnection(url);
+                } catch (IOException e) {
+                    LOG.warn("Failed to open connection. " + e.toString());
+                    continue;
+                }
+
+                conn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
+                try {
+                    conn.setRequestMethod("PUT");
+                } catch (ProtocolException e) {
+                    LOG.warn("Failed to set request method. " + e.toString());
+                    continue;
+                }
+                conn.setDoOutput(true);
+                conn.setDoInput(true);
+
+                try {
+                    conn.connect();
+
+                    responseStatus = conn.getResponseCode();
+                    switch (responseStatus) {
+                        case 200:
+                        case 201:
+                            BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream()));
+                            StringBuilder sb = new StringBuilder();
+                            String line;
+                            while ((line = br.readLine()) != null) {
+                                sb.append(line + "\n");
+                            }
+                            br.close();
+
+                            json = new JSONObject(sb.toString());
+                    }
+
+                } catch (IOException | JSONException e) {
+                    LOG.warn("ERROR! " + e.toString());
+                    continue;
+                }
+
+                if (responseStatus == 200 || responseStatus == 201) {
+                    success = true;
+                    break;
+                }
+            }
+            if (!success) {
+                throw new HasException("Failed : HTTP error code : "
+                    + responseStatus);
+            }
+        } else {
+            WebResource webResource;
+            Client client = Client.create();
+            String[] hosts = config.getHttpHost().split(",");
+            for (String host : hosts) {
+                webResource = client
+                    .resource("http://" + host.trim() + ":" + config.getHttpPort()
+                        + "/has/v1?type=" + type + "&authToken="
+                        + tokenString);
+                try {
+                    ClientResponse response = webResource.accept("application/json")
+                        .put(ClientResponse.class);
+
+                    if (response.getStatus() != 200) {
+                        LOG.warn("WARN! " + response.getEntity(String.class));
+                        responseStatus = response.getStatus();
+                        continue;
+                    }
+                    json = response.getEntity(JSONObject.class);
+                } catch (ClientHandlerException e) {
+                    LOG.warn("WARN! " + e.toString());
+                    continue;
+                }
+                success = true;
+                break;
+            }
+            if (!success) {
+                throw new HasException("Failed : HTTP error code : "
+                    + responseStatus);
+            }
+        }
+
+        LOG.debug("Return from Server .... \n");
+
+        try {
+            return handleResponse(json, (String) authToken.getAttributes().get("passPhrase"));
+        } catch (HasException e) {
+            LOG.debug("Failed to handle response when requesting tgt ticket in client."
+                + e.getMessage());
+            throw new HasException(e);
+        }
+    }
+
+    private File loadSslClientConf(HasConfig config, String sslClientConfPath) throws HasException {
+        File sslClientConf = new File(sslClientConfPath);
+        if (!sslClientConf.exists()) {
+            String httpHost = config.getHttpHost();
+            String httpPort = config.getHttpPort();
+            if (httpHost == null) {
+                LOG.info("Can't find the http host in config, the https host will be used.");
+                httpHost = config.getHttpsHost();
+            }
+            if (httpPort == null) {
+                LOG.info("Can't find the http port in config, the default http port will be used.");
+                httpPort = HAS_HTTP_PORT_DEFAULT;
+            }
+            X509Certificate certificate = getCertificate(httpHost, httpPort);
+            if (verifyCertificate(certificate)) {
+                String password = createTrustStore(config.getHttpsHost(), certificate);
+                createClientSSLConfig(password);
+            } else {
+                throw new HasException("The certificate from HAS server is invalid.");
+            }
+        }
+        return sslClientConf;
+    }
+
+    public KrbMessage getKrbMessage(JSONObject json) throws HasException {
+
+        LOG.debug("Starting to get the message from has server.");
+
+        try {
+            boolean success = json.getBoolean("success");
+            if (!success) {
+                throw new HasException("Failed: " + json.getString("krbMessage"));
+            }
+        } catch (JSONException e) {
+            LOG.debug("Failed to get message." + e);
+            throw new HasException("Failed to get message." + e);
+        }
+
+        String typeString;
+        try {
+            typeString = json.getString("type");
+        } catch (JSONException e) {
+            LOG.debug("Failed to get message." + e);
+            throw new HasException("Failed to get message." + e);
+        }
+
+        if (typeString != null && typeString.equals(type)) {
+            LOG.debug("The message type is " + type);
+            String krbMessageString = null;
+            try {
+                krbMessageString = json.getString("krbMessage");
+            } catch (JSONException e) {
+                LOG.debug("Failed to get the krbMessage. " + e);
+            }
+            Base64 base64 = new Base64(0);
+            byte[] krbMessage = base64.decode(krbMessageString);
+            ByteBuffer byteBuffer = ByteBuffer.wrap(krbMessage);
+            KrbMessage kdcRep;
+            try {
+                kdcRep = KrbCodec.decodeMessage(byteBuffer);
+            } catch (IOException e) {
+                throw new HasException("Krb decoding message failed", e);
+            }
+            return kdcRep;
+        } else {
+            throw new HasException("Can't get the right message from server.");
+        }
+    }
+
+    public TgtTicket handleResponse(JSONObject json, String passPhrase)
+        throws HasException {
+        KrbMessage kdcRep = getKrbMessage(json);
+
+        KrbMessageType messageType = kdcRep.getMsgType();
+        if (messageType == KrbMessageType.AS_REP) {
+            return processResponse((KdcRep) kdcRep, passPhrase);
+        } else if (messageType == KrbMessageType.KRB_ERROR) {
+            KrbError error = (KrbError) kdcRep;
+            LOG.error("KDC server response with message: "
+                + error.getErrorCode().getMessage());
+
+            throw new HasException(error.getEtext());
+        }
+        return null;
+    }
+
+    public TgtTicket processResponse(KdcRep kdcRep, String passPhrase)
+        throws HasException {
+
+        PrincipalName clientPrincipal = kdcRep.getCname();
+        String clientRealm = kdcRep.getCrealm();
+        clientPrincipal.setRealm(clientRealm);
+
+        // Get the client to decrypt the EncryptedData
+        EncryptionKey clientKey = null;
+        try {
+            clientKey = HasUtil.getClientKey(clientPrincipal.getName(),
+                passPhrase,
+                kdcRep.getEncryptedEncPart().getEType());
+        } catch (KrbException e) {
+            throw new HasException("Could not generate key. " + e.getMessage());
+        }
+
+        byte[] decryptedData = decryptWithClientKey(kdcRep.getEncryptedEncPart(),
+            KeyUsage.AS_REP_ENCPART, clientKey);
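+        // Note: the first byte here is the ASN.1 application tag of the decrypted enc-part.
+        // A tag of 26 (EncTGSRepPart) is rewritten to 25 (EncASRepPart) so it can be decoded
+        // as an AS-REP enc-part below.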
+        if ((decryptedData[0] & 0x1f) == 26) {
+            decryptedData[0] = (byte) (decryptedData[0] - 1);
+        }
+        EncKdcRepPart encKdcRepPart = new EncAsRepPart();
+        try {
+            encKdcRepPart.decode(decryptedData);
+        } catch (IOException e) {
+            throw new HasException("Failed to decode EncAsRepPart", e);
+        }
+        kdcRep.setEncPart(encKdcRepPart);
+
+//        if (getChosenNonce() != encKdcRepPart.getNonce()) {
+//            throw new KrbException("Nonce didn't match");
+//        }
+
+//        PrincipalName returnedServerPrincipal = encKdcRepPart.getSname();
+//        returnedServerPrincipal.setRealm(encKdcRepPart.getSrealm());
+//        PrincipalName requestedServerPrincipal = getServerPrincipal();
+//        if (requestedServerPrincipal.getRealm() == null) {
+//            requestedServerPrincipal.setRealm(getContext().getKrbSetting().getKdcRealm());
+//        }
+//        if (!returnedServerPrincipal.equals(requestedServerPrincipal)) {
+//            throw new KrbException(KrbErrorCode.KDC_ERR_SERVER_NOMATCH);
+//        }
+
+//        HostAddresses hostAddresses = getHostAddresses();
+//        if (hostAddresses != null) {
+//            List<HostAddress> requestHosts = hostAddresses.getElements();
+//            if (!requestHosts.isEmpty()) {
+//                List<HostAddress> responseHosts = encKdcRepPart.getCaddr().getElements();
+//                for (HostAddress h : requestHosts) {
+//                    if (!responseHosts.contains(h)) {
+//                        throw new KrbException("Unexpected client host");
+//                    }
+//                }
+//            }
+//        }
+
+        TgtTicket tgtTicket = getTicket(kdcRep);
+        LOG.info("Ticket expire time: " + tgtTicket.getEncKdcRepPart().getEndTime());
+        return tgtTicket;
+
+    }
+
+    protected byte[] decryptWithClientKey(EncryptedData data,
+                                          KeyUsage usage,
+                                          EncryptionKey clientKey) throws HasException {
+        if (clientKey == null) {
+            throw new HasException("Client key isn't available");
+        }
+        try {
+            return EncryptionHandler.decrypt(data, clientKey, usage);
+        } catch (KrbException e) {
+            throw new HasException("Errors occurred when decrypting the data." + e.getMessage());
+        }
+    }
+
+    /**
+     * Get the tgt ticket from KdcRep
+     *
+     * @param kdcRep
+     */
+    public TgtTicket getTicket(KdcRep kdcRep) {
+        TgtTicket tgtTicket = new TgtTicket(kdcRep.getTicket(),
+            (EncAsRepPart) kdcRep.getEncPart(), kdcRep.getCname());
+        return tgtTicket;
+    }
+
+    /**
+     * Get certificate from HAS server.
+     *
+     */
+    private X509Certificate getCertificate(String host, String port) throws HasException {
+        X509Certificate certificate;
+        Client client = Client.create();
+        WebResource webResource = client.resource("http://" + host + ":" + port + "/has/v1/getcert");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        if (response.getStatus() != 200) {
+            throw new HasException(response.getEntity(String.class));
+        }
+        try {
+            CertificateFactory factory = CertificateFactory.getInstance("X.509");
+            InputStream in = response.getEntityInputStream();
+            certificate = (X509Certificate) factory.generateCertificate(in);
+        } catch (CertificateException e) {
+            throw new HasException("Failed to get certificate from HAS server", e);
+        }
+
+        return certificate;
+    }
+
+    /**
+     * Verify certificate.
+     */
+    private boolean verifyCertificate(X509Certificate certificate) throws HasException {
+        // Check if certificate is expired
+        try {
+            Date date = new Date();
+            certificate.checkValidity(date);
+        } catch (GeneralSecurityException e) {
+            return false;
+        }
+
+        // Get certificate from ca root
+        X509Certificate caRoot;
+        try {
+            //Get the ca root path from env, client should export it.
+            String caRootPath = System.getenv("CA_ROOT");
+            if (caRootPath == null) {
+                caRootPath = CA_ROOT_DEFAULT;
+            }
+            File caRootFile;
+            if (caRootPath != null) {
+                caRootFile = new File(caRootPath);
+                if (!caRootFile.exists()) {
+                    throw new HasException("CA_ROOT: " + caRootPath + " not exist.");
+                }
+            } else {
+                throw new HasException("Please set the CA_ROOT.");
+            }
+
+            CertificateFactory factory = CertificateFactory.getInstance("X.509");
+            FileInputStream in = new FileInputStream(caRootFile);
+            caRoot = (X509Certificate) factory.generateCertificate(in);
+        } catch (CertificateException | FileNotFoundException e) {
+            throw new HasException("Failed to get certificate from ca root file", e);
+        }
+
+        // Verify certificate with root certificate
+        try {
+            PublicKey publicKey = caRoot.getPublicKey();
+            certificate.verify(publicKey);
+        } catch (GeneralSecurityException e) {
+            return false;
+        }
+
+        return true;
+    }
+
+    /**
+     * Create and save truststore file based on certificate.
+     *
+     */
+    private String createTrustStore(String host, X509Certificate certificate) throws HasException {
+        KeyStore trustStore;
+
+        // Create password
+        RandomStringGenerator generator = new RandomStringGenerator.Builder()
+            .withinRange('a', 'z')
+            .filteredBy(CharacterPredicates.LETTERS, CharacterPredicates.DIGITS)
+            .build();
+        String password = generator.generate(15);
+
+        File trustStoreFile = new File(clientConfigFolder + "/truststore.jks");
+        try {
+            trustStore = KeyStore.getInstance("jks");
+            trustStore.load(null, null);
+            trustStore.setCertificateEntry(host, certificate);
+            FileOutputStream out = new FileOutputStream(trustStoreFile);
+            trustStore.store(out, password.toCharArray());
+            out.close();
+        } catch (IOException | GeneralSecurityException e) {
+            throw new HasException("Failed to create and save truststore file", e);
+        }
+        return password;
+    }
+
+    /**
+     * Create ssl configuration file for client.
+     *
+     */
+    private void createClientSSLConfig(String password) throws HasException {
+        String resourcePath = "/ssl-client.conf.template";
+        InputStream templateResource = getClass().getResourceAsStream(resourcePath);
+        try {
+            String content = IOUtil.readInput(templateResource);
+            content = content.replaceAll("_location_", clientConfigFolder.getAbsolutePath()
+                + "/truststore.jks");
+            content = content.replaceAll("_password_", password);
+
+            IOUtil.writeFile(content, new File(clientConfigFolder + "/ssl-client.conf"));
+        } catch (IOException e) {
+            throw new HasException("Failed to create client ssl configuration file", e);
+        }
+    }
+}

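A minimal client-side usage sketch, assuming a HAS server reachable over HTTPS. The host,
port and auth_type values are placeholders; the config string format (semicolon-separated
URLs carrying an auth_type query parameter) follows the parsing logic in requestTgt() above.

    import org.apache.kerby.has.client.HasClient;
    import org.apache.kerby.has.common.HasException;
    import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;

    public class HasClientSketch {
        public static void main(String[] args) {
            // Two HAS server instances sharing the same HTTPS port and auth type (placeholders).
            String hadoopSecurityHas =
                "https://server1.example.com:443?auth_type=example;"
                + "https://server2.example.com:443?auth_type=example";
            HasClient client = new HasClient(hadoopSecurityHas);
            try {
                TgtTicket tgt = client.requestTgt();
                System.out.println("TGT expires at: " + tgt.getEncKdcRepPart().getEndTime());
            } catch (HasException e) {
                System.err.println("Failed to request a TGT: " + e.getMessage());
            }
        }
    }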
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/kerby/has/client/HasClientPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/kerby/has/client/HasClientPlugin.java b/has/has-client/src/main/java/org/apache/kerby/has/client/HasClientPlugin.java
new file mode 100644
index 0000000..03b04b6
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/kerby/has/client/HasClientPlugin.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kerby.has.client;
+
+import org.apache.kerby.has.common.HasConfig;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+
+public interface HasClientPlugin {
+
+    /**
+     * Get the login module type ID, used to distinguish this module from others.
+     * Should correspond to the server side module.
+     *
+     * @return login type
+     */
+    String getLoginType();
+
+    /**
+     * Perform all the client-side login logic; the result is wrapped in an AuthToken
+     * that will be validated by the HAS server.
+     *
+     * @param conf token plugin config
+     * @return user auth token
+     * @throws HasLoginException if the login fails
+     */
+    AuthToken login(HasConfig conf) throws HasLoginException;
+}

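To illustrate the contract, a minimal plugin sketch. The login type, subject, issuer and
attribute values are placeholders, and it assumes the Kerby JWT token provider exposes a
token factory (createTokenFactory()) for building the AuthToken.

    package org.apache.kerby.has.client;

    import org.apache.kerby.has.common.HasConfig;
    import org.apache.kerby.kerberos.kerb.KrbRuntime;
    import org.apache.kerby.kerberos.kerb.type.base.AuthToken;

    public class ExamplePlugin implements HasClientPlugin {

        @Override
        public String getLoginType() {
            // Must match the login type of the corresponding server-side plugin.
            return "EXAMPLE";
        }

        @Override
        public AuthToken login(HasConfig conf) throws HasLoginException {
            // Build a token carrying whatever the matching server-side plugin validates.
            AuthToken token = KrbRuntime.getTokenProvider("JWT").createTokenFactory().createToken();
            token.setSubject("alice");                  // placeholder subject
            token.setIssuer("example-client");          // placeholder issuer
            token.addAttribute("passPhrase", "secret"); // placeholder attribute
            return token;
        }
    }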
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/kerby/has/client/HasClientPluginRegistry.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/kerby/has/client/HasClientPluginRegistry.java b/has/has-client/src/main/java/org/apache/kerby/has/client/HasClientPluginRegistry.java
new file mode 100644
index 0000000..45cd193
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/kerby/has/client/HasClientPluginRegistry.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.client;
+
+import org.apache.kerby.has.common.HasException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.ServiceLoader;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class HasClientPluginRegistry {
+    static final Logger LOG = LoggerFactory.getLogger(HasClientPluginRegistry.class);
+
+    private static Map<String, Class> allPlugins = new ConcurrentHashMap<>();
+
+    static {
+        ServiceLoader<HasClientPlugin> plugins = ServiceLoader.load(HasClientPlugin.class);
+
+        for (HasClientPlugin plugin : plugins) {
+            allPlugins.put(plugin.getLoginType(), plugin.getClass());
+        }
+    }
+
+    public static Set<String> registeredPlugins() {
+        return Collections.unmodifiableSet(allPlugins.keySet());
+    }
+
+    public static boolean registeredPlugin(String name) {
+        return allPlugins.containsKey(name);
+    }
+
+    public static HasClientPlugin createPlugin(String name) throws HasException {
+        if (!registeredPlugin(name)) {
+            throw new HasException("Unregistered plugin " + name);
+        }
+        try {
+            HasClientPlugin clientPlugin = (HasClientPlugin) allPlugins.get(name).newInstance();
+            return clientPlugin;
+        } catch (Exception e) {
+            LOG.error("Create {} plugin failed", name, e);
+            throw new HasException(e.getMessage());
+        }
+    }
+}

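Because the registry discovers plugins with java.util.ServiceLoader, a client plugin jar also
needs the standard provider-configuration file. Using the hypothetical plugin from the sketch
above, it would look like:

    # META-INF/services/org.apache.kerby.has.client.HasClientPlugin
    org.apache.kerby.has.client.ExamplePlugin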
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/kerby/has/client/HasLoginException.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/kerby/has/client/HasLoginException.java b/has/has-client/src/main/java/org/apache/kerby/has/client/HasLoginException.java
new file mode 100644
index 0000000..2157537
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/kerby/has/client/HasLoginException.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kerby.has.client;
+
+import org.apache.kerby.has.common.HasException;
+
+public class HasLoginException extends HasException {
+    private static final long serialVersionUID = 4140429098192628252L;
+
+    public HasLoginException(Throwable cause) {
+        super(cause);
+    }
+
+    public HasLoginException(String message) {
+        super(message);
+    }
+
+    public HasLoginException(String message, Throwable cause) {
+        super(message, cause);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-client/src/main/java/org/apache/kerby/has/client/HasLoginModule.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/kerby/has/client/HasLoginModule.java b/has/has-client/src/main/java/org/apache/kerby/has/client/HasLoginModule.java
new file mode 100644
index 0000000..91f3e35
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/kerby/has/client/HasLoginModule.java
@@ -0,0 +1,491 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.kerby.has.client;
+
+import com.sun.security.auth.module.Krb5LoginModule;
+import org.apache.kerby.has.common.HasException;
+import org.apache.kerby.kerberos.kerb.ccache.Credential;
+import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import sun.security.jgss.krb5.Krb5Util;
+import sun.security.krb5.Credentials;
+import sun.security.krb5.KrbException;
+import sun.security.krb5.PrincipalName;
+
+import javax.security.auth.DestroyFailedException;
+import javax.security.auth.Subject;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.kerberos.KerberosPrincipal;
+import javax.security.auth.kerberos.KerberosTicket;
+import javax.security.auth.login.LoginException;
+import javax.security.auth.spi.LoginModule;
+import java.io.IOException;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Login with a TGT ticket.
+ * The client's TGT will be retrieved through the HasClient API.
+ */
+//CHECKSTYLE.OFF
+public class HasLoginModule implements LoginModule {
+
+    public static final Logger LOG = LoggerFactory.getLogger(HasLoginModule.class);
+
+    Krb5LoginModule krb5LoginModule;
+
+    // initial state
+    private Subject subject;
+    private CallbackHandler callbackHandler;
+    private Map<String, Object> sharedState;
+    private Map<String, ?> options;
+
+    // configurable option
+    private boolean debug = false;
+    private boolean doNotPrompt = false;
+    private boolean useTgtTicket = false;
+    private String hadoopSecurityHas = null;
+    private String princName = null;
+
+    private boolean refreshKrb5Config = false;
+
+    // specify if initiator.
+    // perform authentication exchange if initiator
+    private boolean isInitiator = true;
+
+    // the authentication status
+    private boolean succeeded = false;
+    private boolean commitSucceeded = false;
+
+    private Credentials cred = null;
+
+    private PrincipalName principal = null;
+    private KerberosPrincipal kerbClientPrinc = null;
+    private KerberosTicket kerbTicket = null;
+    private StringBuffer krb5PrincName = null;
+    private boolean unboundServer = false;
+
+    /**
+     * Initialize this <code>LoginModule</code>.
+     * <p>
+     *
+     * @param subject         the <code>Subject</code> to be authenticated. <p>
+     * @param callbackHandler a <code>CallbackHandler</code> for
+     *                        communication with the end user (prompting for
+     *                        usernames and passwords, for example). <p>
+     * @param sharedState     shared <code>LoginModule</code> state. <p>
+     * @param options         options specified in the login
+     *                        <code>Configuration</code> for this particular
+     *                        <code>LoginModule</code>.
+     */
+    public void initialize(Subject subject,
+                           CallbackHandler callbackHandler,
+                           Map<String, ?> sharedState,
+                           Map<String, ?> options) {
+
+        this.subject = subject;
+        this.callbackHandler = callbackHandler;
+        this.sharedState = (Map<String, Object>) sharedState;
+        this.options = options;
+
+        // initialize any configured options
+        useTgtTicket = "true".equalsIgnoreCase((String) options.get("useTgtTicket"));
+
+        if (useTgtTicket) {
+            debug = "true".equalsIgnoreCase((String) options.get("debug"));
+            doNotPrompt = "true".equalsIgnoreCase((String) options.get("doNotPrompt"));
+            useTgtTicket = "true".equalsIgnoreCase((String) options.get("useTgtTicket"));
+            hadoopSecurityHas = (String) options.get("hadoopSecurityHas");
+            princName = (String) options.get("principal");
+            refreshKrb5Config =
+                "true".equalsIgnoreCase((String) options.get("refreshKrb5Config"));
+
+            // check isInitiator value
+            String isInitiatorValue = ((String) options.get("isInitiator"));
+            if (isInitiatorValue != null) {
+                // use default, if value not set
+                isInitiator = "true".equalsIgnoreCase(isInitiatorValue);
+            }
+
+            if (debug) {
+                System.out.print("Debug is  " + debug
+                    + " doNotPrompt " + doNotPrompt
+                    + " isInitiator " + isInitiator
+                    + " refreshKrb5Config is " + refreshKrb5Config
+                    + " principal is " + princName + "\n");
+            }
+        } else {
+            krb5LoginModule = new Krb5LoginModule();
+            krb5LoginModule.initialize(subject, callbackHandler, sharedState, options);
+        }
+    }
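+
+    // A hypothetical JAAS configuration entry wiring up the options read in initialize() above;
+    // the entry name and all option values below are placeholders, not part of this commit:
+    //
+    //   HasExample {
+    //     org.apache.kerby.has.client.HasLoginModule required
+    //       useTgtTicket=true
+    //       hadoopSecurityHas="https://server1.example.com:443?auth_type=example"
+    //       principal="alice@EXAMPLE.COM"
+    //       debug=true;
+    //   };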
+
+    /**
+     * Authenticate the user
+     * <p>
+     *
+     * @return true in all cases since this <code>LoginModule</code>
+     * should not be ignored.
+     * @throws LoginException       if this <code>LoginModule</code>
+     *                              is unable to perform the authentication.
+     */
+    public boolean login() throws LoginException {
+
+        if (useTgtTicket) {
+            if (refreshKrb5Config) {
+                try {
+                    if (debug) {
+                        System.out.println("Refreshing Kerberos configuration");
+                    }
+                    sun.security.krb5.Config.refresh();
+                } catch (KrbException ke) {
+                    LoginException le = new LoginException(ke.getMessage());
+                    le.initCause(ke);
+                    throw le;
+                }
+            }
+            String principalProperty = System.getProperty("sun.security.krb5.principal");
+            if (principalProperty != null) {
+                krb5PrincName = new StringBuffer(principalProperty);
+            } else {
+                if (princName != null) {
+                    krb5PrincName = new StringBuffer(princName);
+                }
+            }
+
+            validateConfiguration();
+
+            if (krb5PrincName != null && krb5PrincName.toString().equals("*")) {
+                unboundServer = true;
+            }
+
+            // attempt the authentication by getting the username and pwd
+            // by prompting or configuration i.e. not from shared state
+
+            try {
+                attemptAuthentication(false);
+                succeeded = true;
+                cleanState();
+                return true;
+            } catch (LoginException e) {
+                // authentication failed -- clean out state
+                if (debug) {
+                    System.out.println("\t\t[HasLoginModule] "
+                        + "authentication failed \n"
+                        + e.getMessage());
+                }
+                succeeded = false;
+                cleanState();
+                throw e;
+            }
+        } else {
+            succeeded = krb5LoginModule.login();
+            return succeeded;
+        }
+    }
+
+    /**
+     * Process the configuration options and get the TGT from the HAS client.
+     */
+
+    private void attemptAuthentication(boolean getPasswdFromSharedState)
+        throws LoginException {
+
+        /*
+         * Check the creds cache to see whether
+         * we have TGT for this client principal
+         */
+        if (krb5PrincName != null) {
+            try {
+                principal = new PrincipalName(krb5PrincName.toString(),
+                        PrincipalName.KRB_NT_PRINCIPAL);
+            } catch (KrbException e) {
+                LoginException le = new LoginException(e.getMessage());
+                le.initCause(e);
+                throw le;
+            }
+        }
+
+        try {
+            if (useTgtTicket) {
+                if (debug) {
+                    System.out.println("use tgt ticket to login, acquire TGT TICKET...");
+                }
+
+                HasClient hasClient = new HasClient(hadoopSecurityHas);
+                TgtTicket tgtTicket = null;
+                try {
+                    tgtTicket = hasClient.requestTgt();
+                } catch (HasException e) {
+                    LoginException le = new LoginException(e.getMessage());
+                    le.initCause(e);
+                    throw le;
+                }
+                Credential credential = new Credential(tgtTicket);
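+                // Map the low-order ticket flag bits into the boolean array that is
+                // handed to the Credentials constructor below.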
+                boolean[] flags = new boolean[7];
+                int flag = credential.getTicketFlags().getFlags();
+                for (int i = 6; i >= 0; i--) {
+                    flags[i] = (flag & (1 << i)) != 0;
+                }
+                Date startTime = null;
+                if (credential.getStartTime() != null) {
+                    startTime = credential.getStartTime().getValue();
+                }
+                cred = new Credentials(credential.getTicket().encode(),
+                    credential.getClientName().getName(),
+                    credential.getServerName().getName(),
+                    credential.getKey().getKeyData(),
+                    credential.getKey().getKeyType().getValue(),
+                    flags,
+                    credential.getAuthTime().getValue(),
+                    startTime,
+                    credential.getEndTime().getValue(),
+                    credential.getRenewTill().getValue(),
+                    null);
+
+                if (cred != null) {
+                    // take the principal name from the acquired credentials
+                    if (principal == null) {
+                        principal = cred.getClient();
+                    }
+                }
+                if (debug) {
+                    System.out.println("Principal is " + principal);
+                    if (cred == null) {
+                        System.out.println("null credentials from TGT Ticket");
+                    }
+                }
+            }
+        } catch (KrbException e) {
+            LoginException le = new LoginException(e.getMessage());
+            le.initCause(e);
+            throw le;
+        } catch (IOException ioe) {
+            LoginException ie = new LoginException(ioe.getMessage());
+            ie.initCause(ioe);
+            throw ie;
+        }
+    }
+
+    private void validateConfiguration() throws LoginException {
+        if (doNotPrompt && !useTgtTicket) {
+            throw new LoginException("Configuration Error"
+                + " - either doNotPrompt should be "
+                + " false or"
+                + " useTgtTicket"
+                + " should be true");
+        }
+
+        if (krb5PrincName != null && krb5PrincName.toString().equals("*")) {
+            if (isInitiator) {
+                throw new LoginException("Configuration Error"
+                        + " - principal cannot be * when isInitiator is true");
+            }
+        }
+    }
+
+    /**
+     * This method is called if the LoginContext's
+     * overall authentication succeeded.
+     *
+     * @return true if this LoginModule's own login and commit
+     * attempts succeeded, or false otherwise.
+     * @throws LoginException if the commit fails.
+     */
+    public boolean commit() throws LoginException {
+        if (debug) {
+            System.out.println("Login success? " + succeeded);
+        }
+
+        if (useTgtTicket) {
+            /*
+             * Add the Kerberos credentials to the Subject's
+             * private credentials. The credentials are of type
+             * KerberosTicket.
+             */
+            if (!succeeded) {
+                return false;
+            } else {
+
+                if (isInitiator && (cred == null)) {
+                    succeeded = false;
+                    throw new LoginException("Null Client Credential");
+                }
+
+                if (subject.isReadOnly()) {
+                    cleanKerberosCred();
+                    throw new LoginException("Subject is Readonly");
+                }
+
+                /*
+                 * Add the Principal (authenticated identity)
+                 * to the Subject's principal set and
+                 * add the credentials (the TGT) to the
+                 * Subject's private credentials.
+                 */
+
+                Set<Object> privCredSet = subject.getPrivateCredentials();
+                Set<java.security.Principal> princSet = subject.getPrincipals();
+                kerbClientPrinc = new KerberosPrincipal(principal.getName());
+
+                // create Kerberos Ticket
+                if (isInitiator) {
+                    kerbTicket = Krb5Util.credsToTicket(cred);
+                }
+
+                // Add kerbClientPrinc and kerbTicket to the Subject
+
+                // We won't add "*" as a KerberosPrincipal
+                if (!unboundServer
+                    && !princSet.contains(kerbClientPrinc)) {
+                    princSet.add(kerbClientPrinc);
+                }
+
+                // add the TGT
+                if (kerbTicket != null) {
+                    if (!privCredSet.contains(kerbTicket)) {
+                        privCredSet.add(kerbTicket);
+                    }
+                }
+            }
+            commitSucceeded = true;
+            if (debug) {
+                System.out.println("Commit Succeeded \n");
+            }
+            return true;
+        } else {
+            return krb5LoginModule.commit();
+        }
+    }
+
+    /**
+     * This method is called if the LoginContext's
+     * overall authentication failed.
+     *
+     * @return false if this LoginModule's own login and/or commit attempts
+     * failed, and true otherwise.
+     * @throws LoginException if the abort fails.
+     */
+    public boolean abort() throws LoginException {
+        if (useTgtTicket) {
+            if (!succeeded) {
+                return false;
+            } else if (!commitSucceeded) {
+                // login succeeded but overall authentication failed
+                succeeded = false;
+                cleanKerberosCred();
+            } else {
+                // overall authentication succeeded and commit succeeded,
+                // but someone else's commit failed
+                logout();
+            }
+            return true;
+        } else {
+            return krb5LoginModule.abort();
+        }
+    }
+
+    /**
+     * Logout the user.
+     * <p>
+     * This method removes the <code>KerberosPrincipal</code>
+     * that was added by the <code>commit</code> method.
+     *
+     * @return true in all cases since this <code>LoginModule</code>
+     * should not be ignored.
+     * @throws LoginException if the logout fails.
+     */
+    public boolean logout() throws LoginException {
+
+        if (useTgtTicket) {
+            if (debug) {
+                System.out.println("\t\t[HasLoginModule]: "
+                    + "Entering logout");
+            }
+
+            if (subject.isReadOnly()) {
+                cleanKerberosCred();
+                throw new LoginException("Subject is Readonly");
+            }
+
+            subject.getPrincipals().remove(kerbClientPrinc);
+            // Let us remove all Kerberos credentials stored in the Subject
+            Iterator<Object> it = subject.getPrivateCredentials().iterator();
+            while (it.hasNext()) {
+                Object o = it.next();
+                if (o instanceof KerberosTicket) {
+                    it.remove();
+                }
+            }
+            // clean the kerberos ticket and keys
+            cleanKerberosCred();
+
+            succeeded = false;
+            commitSucceeded = false;
+            if (debug) {
+                System.out.println("\t\t[HasLoginModule]: "
+                    + "logged out Subject");
+            }
+            return true;
+        } else {
+            return krb5LoginModule.logout();
+        }
+    }
+
+    /**
+     * Clean Kerberos credentials
+     */
+    private void cleanKerberosCred() throws LoginException {
+        // Destroy the Kerberos ticket, if any
+        try {
+            if (kerbTicket != null) {
+                kerbTicket.destroy();
+            }
+        } catch (DestroyFailedException e) {
+            throw new LoginException("Destroy Failed on Kerberos Private Credentials");
+        }
+        kerbTicket = null;
+        kerbClientPrinc = null;
+    }
+
+    /**
+     * Clean out the state
+     */
+    private void cleanState() {
+
+        if (!succeeded) {
+            // remove temp results for the next try
+            principal = null;
+        }
+        if (krb5PrincName != null && krb5PrincName.length() != 0) {
+            krb5PrincName.delete(0, krb5PrincName.length());
+        }
+        krb5PrincName = null;
+    }
+}
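
For illustration only: a JAAS login module like the one above is normally driven through
a LoginContext backed by a JAAS configuration entry. The entry name, the module's fully
qualified class name and the exact option spellings in this sketch are assumptions, not
something defined by this patch.

    import javax.security.auth.Subject;
    import javax.security.auth.login.LoginContext;

    public class HasLoginExample {
        /* Hypothetical JAAS configuration, passed via
         * -Djava.security.auth.login.config=/path/to/jaas.conf:
         *
         *   HasExample {
         *       org.apache.hadoop.has.HasLoginModule required   // placeholder class name
         *           useTgtTicket=true
         *           debug=true;
         *   };
         */
        public static void main(String[] args) throws Exception {
            LoginContext lc = new LoginContext("HasExample");
            lc.login();                        // drives login() and commit() above
            Subject subject = lc.getSubject();
            System.out.println("Principals: " + subject.getPrincipals());
            lc.logout();                       // removes the principal and destroys the TGT
        }
    }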

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-common/pom.xml b/has/has-common/pom.xml
index 3046871..47e3147 100644
--- a/has/has-common/pom.xml
+++ b/has/has-common/pom.xml
@@ -5,7 +5,7 @@
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
-    <groupId>org.apache.hadoop</groupId>
+    <groupId>org.apache.kerby</groupId>
     <artifactId>has-project</artifactId>
     <version>1.0.0-SNAPSHOT</version>
   </parent>
@@ -34,7 +34,7 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-annotations</artifactId>
-      <version>3.0.0-alpha2</version>
+      <version>${hadoop.version}</version>
     </dependency>
     <dependency>
       <groupId>commons-codec</groupId>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/HasAdmin.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/HasAdmin.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/HasAdmin.java
deleted file mode 100644
index 94dc5df..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/HasAdmin.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License. 
- *
- */
-package org.apache.hadoop.has.common;
-
-import java.io.File;
-import java.util.List;
-
-/**
- * Server side admin facilities from remote, similar to MIT kadmin remote mode.
- */
-public interface HasAdmin {
-
-    /**
-     * Get the hadmin principal name.
-     *
-     * @return The hadmin principal name.
-     */
-    String getHadminPrincipal();
-
-    /**
-     * Add principal to backend.
-     *
-     * @param principal The principal to be added into backend
-     * @throws HasException e
-     */
-    void addPrincipal(String principal) throws HasException;
-
-    /**
-     * Add principal to backend.
-     *
-     * @param principal The principal to be added into backend
-     * @param password  The password to create encryption key
-     * @throws HasException e
-     */
-    void addPrincipal(String principal, String password) throws HasException;
-
-    /**
-     * Export all the keys of the specified principal into the specified keytab
-     * file.
-     *
-     * @param keytabFile The keytab file
-     * @param principal The principal name
-     * @throws HasException e
-     */
-    void exportKeytab(File keytabFile, String principal) throws HasException;
-
-    /**
-     * Export all the keys of the specified principals into the specified keytab
-     * file.
-     *
-     * @param keytabFile The keytab file
-     * @param principals The principal names
-     * @throws HasException e
-     */
-    void exportKeytab(File keytabFile, List<String> principals) throws HasException;
-
-    /**
-     * Delete the principal in backend.
-     *
-     * @param principal The principal to be deleted from backend
-     * @throws HasException e
-     */
-    void deletePrincipal(String principal) throws HasException;
-
-    /**
-     * Rename the principal.
-     *
-     * @param oldPrincipalName The original principal name
-     * @param newPrincipalName The new principal name
-     * @throws HasException e
-     */
-    void renamePrincipal(String oldPrincipalName,
-                         String newPrincipalName) throws HasException;
-
-    /**
-     * Get all the principal names from backend.
-     *
-     * @return principal list
-     * @throws HasException e
-     */
-    List<String> getPrincipals() throws HasException;
-
-    /**
-     * Get all the principal names that meets the pattern
-     *
-     * @param globString The glob string for matching
-     * @return Principal names
-     * @throws HasException e
-     */
-    List<String> getPrincipals(String globString) throws HasException;
-
-    /**
-     * Change the password of specified principal.
-     *
-     * @param principal The principal to be updated password
-     * @param newPassword The new password
-     * @throws HasException e
-     */
-//    void changePassword(String principal, String newPassword) throws HasException;
-
-    /**
-     * Update the random keys of specified principal.
-     *
-     * @param principal The principal to be updated keys
-     * @throws HasException e
-     */
-//    void updateKeys(String principal) throws HasException;
-
-    /**
-     * Release any resources associated.
-     *
-     * @throws HasException e
-     */
-//    void release() throws HasException;
-
-    String addPrincByRole(String host, String role) throws HasException;
-
-    File getKeytabByHostAndRole(String host, String role) throws HasException;
-
-    int size() throws HasException;
-
-    void setEnableOfConf(String isEnable) throws HasException;
-}
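
As a rough usage sketch of the HasAdmin contract above (how an implementation instance is
obtained is outside the scope of this interface, so the admin parameter below is assumed):

    import java.io.File;
    import java.util.List;

    public class HasAdminUsageSketch {
        // 'admin' can be any HasAdmin implementation; obtaining one is not shown here.
        static void provision(HasAdmin admin) throws HasException {
            admin.addPrincipal("alice", "mypassword");    // key derived from the given password
            admin.addPrincipal("bob");                    // no password supplied
            admin.exportKeytab(new File("/tmp/bob.keytab"), "bob");
            List<String> principals = admin.getPrincipals();
            System.out.println("Backend now holds: " + principals);
        }
    }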

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfig.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfig.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfig.java
deleted file mode 100644
index 3fc0998..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfig.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.common;
-
-import org.apache.kerby.kerberos.kerb.common.Krb5Conf;
-
-import java.io.File;
-
-/**
- * AK configuration API.
- */
-public class HasConfig extends Krb5Conf {
-    private File confDir;
-
-    public void setConfDir(File dir) {
-        this.confDir = dir;
-    }
-
-    public File getConfDir() {
-        return confDir;
-    }
-
-    public String getHttpsHost() {
-        return getString(HasConfigKey.HTTPS_HOST, false, "HAS");
-    }
-
-    public String getHttpsPort() {
-        return getString(HasConfigKey.HTTPS_PORT, false, "HAS");
-    }
-
-    public String getHttpHost() {
-        return getString(HasConfigKey.HTTP_HOST, false, "HAS");
-    }
-
-    public String getHttpPort() {
-        return getString(HasConfigKey.HTTP_PORT, false, "HAS");
-    }
-
-    public String getPluginName() {
-        return getString(HasConfigKey.AUTH_TYPE, true, "PLUGIN");
-    }
-
-    public String getRealm() {
-        return getString(HasConfigKey.REALM, false, "HAS");
-    }
-
-    public String getSslServerConf() {
-        return getString(HasConfigKey.SSL_SERVER_CONF, true, "HAS");
-    }
-
-    public String getSslClientConf() {
-        return getString(HasConfigKey.SSL_CLIENT_CONF, true, "HAS");
-    }
-
-    public String getFilterAuthType() {
-        return getString(HasConfigKey.FILTER_AUTH_TYPE, true, "HAS");
-    }
-
-    public String getKerberosPrincipal() {
-        return getString(HasConfigKey.KERBEROS_PRINCIPAL, false, "HAS");
-    }
-
-    public String getKerberosKeytab() {
-        return getString(HasConfigKey.KERBEROS_KEYTAB, false, "HAS");
-    }
-
-    public String getKerberosNameRules() {
-        return getString(HasConfigKey.KERBEROS_NAME_RULES, false, "HAS");
-    }
-
-    public String getAdminKeytab() {
-        return getString(HasConfigKey.ADMIN_KEYTAB, false, "HAS");
-    }
-
-    public String getAdminKeytabPrincipal() {
-        return getString(HasConfigKey.ADMIN_KEYTAB_PRINCIPAL, false, "HAS");
-    }
-
-    public String getEnableConf() {
-        return getString(HasConfigKey.ENABLE_CONF, false, "HAS");
-    }
-
-    public String getSslClientCert() {
-        return getString(HasConfigKey.SSL_CLIENT_CERT, true, "HAS");
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfigKey.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfigKey.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfigKey.java
deleted file mode 100644
index 07db8d4..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfigKey.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.common;
-
-import org.apache.kerby.config.ConfigKey;
-
-public enum HasConfigKey implements ConfigKey {
-    HTTP_HOST,
-    HTTP_PORT,
-    HTTPS_HOST,
-    HTTPS_PORT,
-    AUTH_TYPE("RAM"),
-    REALM,
-    ENABLE_CONF,
-    SSL_SERVER_CONF("/etc/has/ssl-server.conf"),
-    SSL_CLIENT_CONF("/etc/has/ssl-client.conf"),
-    SSL_CLIENT_CERT("/etc/has/cert-signed"),
-    FILTER_AUTH_TYPE("kerberos"),
-    KERBEROS_PRINCIPAL,
-    KERBEROS_KEYTAB,
-    KERBEROS_NAME_RULES,
-    ADMIN_KEYTAB,
-    ADMIN_KEYTAB_PRINCIPAL;
-
-    private Object defaultValue;
-
-    HasConfigKey() {
-        this.defaultValue = null;
-    }
-
-    HasConfigKey(Object defaultValue) {
-        this.defaultValue = defaultValue;
-    }
-
-    @Override
-    public String getPropertyKey() {
-        return name().toLowerCase();
-    }
-
-    @Override
-    public Object getDefaultValue() {
-        return this.defaultValue;
-    }
-}
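
The mapping above is mechanical: the property key is the lower-cased enum name and the
default value is whatever the constructor received (null when none). For example:

    // Derived directly from the enum above:
    //   HasConfigKey.HTTPS_PORT.getPropertyKey()        -> "https_port" (no default)
    //   HasConfigKey.AUTH_TYPE.getPropertyKey()         -> "auth_type"
    //   HasConfigKey.AUTH_TYPE.getDefaultValue()        -> "RAM"
    //   HasConfigKey.SSL_SERVER_CONF.getDefaultValue()  -> "/etc/has/ssl-server.conf"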

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/HasException.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/HasException.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/HasException.java
deleted file mode 100644
index f8fc3b3..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/HasException.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.has.common;
-
-public class HasException extends Exception {
-
-    private static final long serialVersionUID = -1916788959202646914L;
-
-    /**
-     * Creates an {@link HasException}.
-     *
-     * @param cause original exception.
-     */
-    public HasException(Throwable cause) {
-        super(cause);
-    }
-
-    /**
-     * Creates an {@link HasException}.
-     *
-     * @param message exception message.
-     */
-    public HasException(String message) {
-        super(message);
-    }
-
-    /**
-     * Creates an {@link HasException}.
-     *
-     * @param message exception message.
-     * @param cause   original exception.
-     */
-    public HasException(String message, Throwable cause) {
-        super(message, cause);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthToken.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthToken.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthToken.java
deleted file mode 100644
index c7a18da..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthToken.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/**
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License. See accompanying LICENSE file.
- */
-package org.apache.hadoop.has.common.spnego;
-
-import java.security.Principal;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.StringTokenizer;
-
-/**
- * Borrow the class from Apache hadoop
- */
-public class AuthToken implements Principal {
-
-  /**
-   * Constant that identifies an anonymous request.
-   */
-
-  private static final String ATTR_SEPARATOR = "&";
-  private static final String USER_NAME = "u";
-  private static final String PRINCIPAL = "p";
-  private static final String EXPIRES = "e";
-  private static final String TYPE = "t";
-
-  private static final Set<String> ATTRIBUTES =
-    new HashSet<String>(Arrays.asList(USER_NAME, PRINCIPAL, EXPIRES, TYPE));
-
-  private String userName;
-  private String principal;
-  private String type;
-  private long expires;
-  private String tokenStr;
-
-  protected AuthToken() {
-    userName = null;
-    principal = null;
-    type = null;
-    expires = -1;
-    tokenStr = "ANONYMOUS";
-    generateToken();
-  }
-
-  private static final String ILLEGAL_ARG_MSG = " is NULL, empty or contains a '" + ATTR_SEPARATOR + "'";
-
-  /**
-   * Creates an authentication token.
-   *
-   * @param userName user name.
-   * @param principal principal (commonly matches the user name, with Kerberos is the full/long principal
-   * name while the userName is the short name).
-   * @param type the authentication mechanism name.
-   * (<code>System.currentTimeMillis() + validityPeriod</code>).
-   */
-  public AuthToken(String userName, String principal, String type) {
-    checkForIllegalArgument(userName, "userName");
-    checkForIllegalArgument(principal, "principal");
-    checkForIllegalArgument(type, "type");
-    this.userName = userName;
-    this.principal = principal;
-    this.type = type;
-    this.expires = -1;
-  }
-  
-  /**
-   * Check if the provided value is invalid. Throw an error if it is invalid, NOP otherwise.
-   * 
-   * @param value the value to check.
-   * @param name the parameter name to use in an error message if the value is invalid.
-   */
-  protected static void checkForIllegalArgument(String value, String name) {
-    if (value == null || value.length() == 0 || value.contains(ATTR_SEPARATOR)) {
-      throw new IllegalArgumentException(name + ILLEGAL_ARG_MSG);
-    }
-  }
-
-  /**
-   * Sets the expiration of the token.
-   *
-   * @param expires expiration time of the token in milliseconds since the epoch.
-   */
-  public void setExpires(long expires) {
-    this.expires = expires;
-      generateToken();
-  }
-
-  /**
-   * Returns true if the token has expired.
-   *
-   * @return true if the token has expired.
-   */
-  public boolean isExpired() {
-    return getExpires() != -1 && System.currentTimeMillis() > getExpires();
-  }
-
-  /**
-   * Generates the token.
-   */
-  private void generateToken() {
-    StringBuffer sb = new StringBuffer();
-    sb.append(USER_NAME).append("=").append(getUserName()).append(ATTR_SEPARATOR);
-    sb.append(PRINCIPAL).append("=").append(getName()).append(ATTR_SEPARATOR);
-    sb.append(TYPE).append("=").append(getType()).append(ATTR_SEPARATOR);
-    sb.append(EXPIRES).append("=").append(getExpires());
-    tokenStr = sb.toString();
-  }
-
-  /**
-   * Returns the user name.
-   *
-   * @return the user name.
-   */
-  public String getUserName() {
-    return userName;
-  }
-
-  /**
-   * Returns the principal name (this method name comes from the JDK {@link Principal} interface).
-   *
-   * @return the principal name.
-   */
-  @Override
-  public String getName() {
-    return principal;
-  }
-
-  /**
-   * Returns the authentication mechanism of the token.
-   *
-   * @return the authentication mechanism of the token.
-   */
-  public String getType() {
-    return type;
-  }
-
-  /**
-   * Returns the expiration time of the token.
-   *
-   * @return the expiration time of the token, in milliseconds since the epoch.
-   */
-  public long getExpires() {
-    return expires;
-  }
-
-  /**
-   * Returns the string representation of the token.
-   * <p>
-   * This string representation is parseable by the {@link #parse} method.
-   *
-   * @return the string representation of the token.
-   */
-  @Override
-  public String toString() {
-    return tokenStr;
-  }
-
-  public static AuthToken parse(String tokenStr) throws AuthenticationException {
-    if (tokenStr.length() >= 2) {
-      // strip the \" at the two ends of the tokenStr
-      if (tokenStr.charAt(0) == '\"'
-          && tokenStr.charAt(tokenStr.length() - 1) == '\"') {
-        tokenStr = tokenStr.substring(1, tokenStr.length() - 1);
-      }
-    }
-    Map<String, String> map = split(tokenStr);
-    // remove the signature part, since client doesn't care about it
-    map.remove("s");
-
-    if (!map.keySet().equals(ATTRIBUTES)) {
-      throw new AuthenticationException("Invalid token string, missing attributes");
-    }
-    long expires = Long.parseLong(map.get(EXPIRES));
-    AuthToken token = new AuthToken(map.get(USER_NAME), map.get(PRINCIPAL), map.get(TYPE));
-    token.setExpires(expires);
-    return token;
-  }
-
-  /**
-   * Splits the string representation of a token into attribute pairs.
-   *
-   * @param tokenStr string representation of a token.
-   *
-   * @return a map with the attribute pairs of the token.
-   *
-   * @throws AuthenticationException thrown if the string representation of the token could not be broken into
-   * attribute pairs.
-   */
-  private static Map<String, String> split(String tokenStr) throws AuthenticationException {
-    Map<String, String> map = new HashMap<String, String>();
-    StringTokenizer st = new StringTokenizer(tokenStr, ATTR_SEPARATOR);
-    while (st.hasMoreTokens()) {
-      String part = st.nextToken();
-      int separator = part.indexOf('=');
-      if (separator == -1) {
-        throw new AuthenticationException("Invalid authentication token");
-      }
-      String key = part.substring(0, separator);
-      String value = part.substring(separator + 1);
-      map.put(key, value);
-    }
-    return map;
-  }
-
-}
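
A concrete round trip through the token string format produced by generateToken() and
consumed by parse(); the user, principal, type and expiry values here are made up:

    static void roundTrip() throws AuthenticationException {
        AuthToken token = new AuthToken("alice", "alice@EXAMPLE.COM", "kerberos");
        token.setExpires(1511841600000L);
        System.out.println(token);
        // prints: u=alice&p=alice@EXAMPLE.COM&t=kerberos&e=1511841600000

        // parse() strips surrounding quotes, drops any "s" (signature) attribute,
        // and requires u, p, t and e to all be present.
        AuthToken restored = AuthToken.parse(token.toString());
        System.out.println(restored.isExpired());
    }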

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticatedURL.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticatedURL.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticatedURL.java
deleted file mode 100644
index ccd7ea4..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticatedURL.java
+++ /dev/null
@@ -1,282 +0,0 @@
-/**
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License. See accompanying LICENSE file.
- */
-package org.apache.hadoop.has.common.spnego;
-
-import org.apache.hadoop.has.common.util.ConnectionConfigurator;
-
-import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Borrow the class from Apache Hadoop
- */
-
-/**
- * <p>
- * The authentication mechanisms supported by default are Hadoop Simple  authentication
- * (also known as pseudo authentication) and Kerberos SPNEGO authentication.
- * <p>
- * Additional authentication mechanisms can be supported via {@link Authenticator} implementations.
- * <p>
- * The default {@link Authenticator} is the {@link KerberosAuthenticator} class which supports
- * automatic fallback from Kerberos SPNEGO to Hadoop Simple authentication.
- * <p>
- * <code>AuthenticatedURL</code> instances are not thread-safe.
- * <p>
- * The usage pattern of the {@link AuthenticatedURL} is:
- * <pre>
- *
- * // establishing an initial connection
- *
- * URL url = new URL("http://foo:8080/bar");
- * AuthenticatedURL.Token token = new AuthenticatedURL.Token();
- * AuthenticatedURL aUrl = new AuthenticatedURL();
- * HttpURLConnection conn = new AuthenticatedURL(url, token).openConnection();
- * ....
- * // use the 'conn' instance
- * ....
- *
- * // establishing a follow up connection using a token from the previous connection
- *
- * HttpURLConnection conn = new AuthenticatedURL(url, token).openConnection();
- * ....
- * // use the 'conn' instance
- * ....
- *
- * </pre>
- */
-public class AuthenticatedURL {
-
-  /**
-   * Name of the HTTP cookie used for the authentication token between the client and the server.
-   */
-  public static final String AUTH_COOKIE = "hadoop.auth";
-
-  private static final String AUTH_COOKIE_EQ = AUTH_COOKIE + "=";
-
-  /**
-   * Client side authentication token.
-   */
-  public static class Token {
-
-    private String token;
-
-    /**
-     * Creates a token.
-     */
-    public Token() {
-    }
-
-    /**
-     * Creates a token using an existing string representation of the token.
-     *
-     * @param tokenStr string representation of the tokenStr.
-     */
-    public Token(String tokenStr) {
-      if (tokenStr == null) {
-        throw new IllegalArgumentException("tokenStr cannot be null");
-      }
-      set(tokenStr);
-    }
-
-    /**
-     * Returns if a token from the server has been set.
-     *
-     * @return if a token from the server has been set.
-     */
-    public boolean isSet() {
-      return token != null;
-    }
-
-    /**
-     * Sets a token.
-     *
-     * @param tokenStr string representation of the tokenStr.
-     */
-    void set(String tokenStr) {
-      token = tokenStr;
-    }
-
-    /**
-     * Returns the string representation of the token.
-     *
-     * @return the string representation of the token.
-     */
-    @Override
-    public String toString() {
-      return token;
-    }
-
-  }
-
-  private static Class<? extends Authenticator> defaultAuthenticator
-      = KerberosAuthenticator.class;
-
-  /**
-   * Sets the default {@link Authenticator} class to use when an {@link AuthenticatedURL} instance
-   * is created without specifying an authenticator.
-   *
-   * @param authenticator the authenticator class to use as default.
-   */
-  public static void setDefaultAuthenticator(Class<? extends Authenticator> authenticator) {
-    defaultAuthenticator = authenticator;
-  }
-
-  /**
-   * Returns the default {@link Authenticator} class to use when an {@link AuthenticatedURL} instance
-   * is created without specifying an authenticator.
-   *
-   * @return the authenticator class to use as default.
-   */
-  public static Class<? extends Authenticator> getDefaultAuthenticator() {
-    return defaultAuthenticator;
-  }
-
-  private Authenticator authenticator;
-  private ConnectionConfigurator connConfigurator;
-
-  /**
-   * Creates an {@link AuthenticatedURL}.
-   */
-  public AuthenticatedURL() {
-    this(null);
-  }
-
-  /**
-   * Creates an <code>AuthenticatedURL</code>.
-   *
-   * @param authenticator the {@link Authenticator} instance to use, if <code>null</code> a {@link
-   * KerberosAuthenticator} is used.
-   */
-  public AuthenticatedURL(Authenticator authenticator) {
-    this(authenticator, null);
-  }
-
-  /**
-   * Creates an <code>AuthenticatedURL</code>.
-   *
-   * @param authenticator the {@link Authenticator} instance to use, if <code>null</code> a {@link
-   * KerberosAuthenticator} is used.
-   * @param connConfigurator a connection configurator.
-   */
-  public AuthenticatedURL(Authenticator authenticator,
-                          ConnectionConfigurator connConfigurator) {
-    try {
-      this.authenticator = (authenticator != null) ? authenticator : defaultAuthenticator.newInstance();
-    } catch (Exception ex) {
-      throw new RuntimeException(ex);
-    }
-    this.connConfigurator = connConfigurator;
-    this.authenticator.setConnectionConfigurator(connConfigurator);
-  }
-
-  /**
-   * Returns the {@link Authenticator} instance used by the
-   * <code>AuthenticatedURL</code>.
-   *
-   * @return the {@link Authenticator} instance
-   */
-  protected Authenticator getAuthenticator() {
-    return authenticator;
-  }
-
-  /**
-   * Returns an authenticated {@link HttpURLConnection}.
-   *
-   * @param url the URL to connect to. Only HTTP/S URLs are supported.
-   * @param token the authentication token being used for the user.
-   *
-   * @return an authenticated {@link HttpURLConnection}.
-   *
-   * @throws IOException if an IO error occurred.
-   * @throws AuthenticationException if an authentication exception occurred.
-   */
-  public HttpURLConnection openConnection(URL url, Token token) throws IOException, AuthenticationException {
-    if (url == null) {
-      throw new IllegalArgumentException("url cannot be NULL");
-    }
-    if (!url.getProtocol().equalsIgnoreCase("http") && !url.getProtocol().equalsIgnoreCase("https")) {
-      throw new IllegalArgumentException("url must be for a HTTP or HTTPS resource");
-    }
-    if (token == null) {
-      throw new IllegalArgumentException("token cannot be NULL");
-    }
-    authenticator.authenticate(url, token);
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    if (connConfigurator != null) {
-      conn = connConfigurator.configure(conn);
-    }
-    injectToken(conn, token);
-    return conn;
-  }
-
-  /**
-   * Helper method that injects an authentication token to send with a connection.
-   *
-   * @param conn connection to inject the authentication token into.
-   * @param token authentication token to inject.
-   */
-  public static void injectToken(HttpURLConnection conn, Token token) {
-    String t = token.token;
-    if (t != null) {
-      if (!t.startsWith("\"")) {
-        t = "\"" + t + "\"";
-      }
-      conn.addRequestProperty("Cookie", AUTH_COOKIE_EQ + t);
-    }
-  }
-
-  /**
-   * Helper method that extracts an authentication token received from a connection.
-   * <p>
-   * This method is used by {@link Authenticator} implementations.
-   *
-   * @param conn connection to extract the authentication token from.
-   * @param token the authentication token.
-   *
-   * @throws IOException if an IO error occurred.
-   * @throws AuthenticationException if an authentication exception occurred.
-   */
-  public static void extractToken(HttpURLConnection conn, Token token) throws IOException, AuthenticationException {
-    int respCode = conn.getResponseCode();
-    if (respCode == HttpURLConnection.HTTP_OK
-        || respCode == HttpURLConnection.HTTP_CREATED
-        || respCode == HttpURLConnection.HTTP_ACCEPTED) {
-      Map<String, List<String>> headers = conn.getHeaderFields();
-      List<String> cookies = headers.get("Set-Cookie");
-      if (cookies != null) {
-        for (String cookie : cookies) {
-          if (cookie.startsWith(AUTH_COOKIE_EQ)) {
-            String value = cookie.substring(AUTH_COOKIE_EQ.length());
-            int separator = value.indexOf(";");
-            if (separator > -1) {
-              value = value.substring(0, separator);
-            }
-            if (value.length() > 0) {
-              token.set(value);
-            }
-          }
-        }
-      }
-    } else {
-      token.set(null);
-      throw new AuthenticationException("Authentication failed, status: " + conn.getResponseCode()
-          + ", message: " + conn.getResponseMessage());
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticationException.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticationException.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticationException.java
deleted file mode 100644
index 62a5d38..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticationException.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/**
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License. See accompanying LICENSE file.
- */
-package org.apache.hadoop.has.common.spnego;
-
-/**
- * Borrow the class from Apache Hadoop
- */
-
-/**
- * Exception thrown when an authentication error occurs.
- */
-public class AuthenticationException extends Exception {
-  
-  static final long serialVersionUID = 0;
-
-  /**
-   * Creates an {@link AuthenticationException}.
-   *
-   * @param cause original exception.
-   */
-  public AuthenticationException(Throwable cause) {
-    super(cause);
-  }
-
-  /**
-   * Creates an {@link AuthenticationException}.
-   *
-   * @param msg exception message.
-   */
-  public AuthenticationException(String msg) {
-    super(msg);
-  }
-
-  /**
-   * Creates an {@link AuthenticationException}.
-   *
-   * @param msg exception message.
-   * @param cause original exception.
-   */
-  public AuthenticationException(String msg, Throwable cause) {
-    super(msg, cause);
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/Authenticator.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/Authenticator.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/Authenticator.java
deleted file mode 100644
index 91eb1a0..0000000
--- a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/Authenticator.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License. See accompanying LICENSE file.
- */
-package org.apache.hadoop.has.common.spnego;
-
-import org.apache.hadoop.has.common.util.ConnectionConfigurator;
-
-import java.io.IOException;
-import java.net.URL;
-
-/**
- * Borrow the class from Apache Hadoop
- */
-
-/**
- * Interface for client authentication mechanisms.
- * <p>
- * Implementations are use-once instances, they don't need to be thread safe.
- */
-public interface Authenticator {
-
-  /**
-   * Sets a {@link ConnectionConfigurator} instance to use for
-   * configuring connections.
-   *
-   * @param configurator the {@link ConnectionConfigurator} instance.
-   */
-  void setConnectionConfigurator(ConnectionConfigurator configurator);
-
-  /**
-   * Authenticates against a URL and returns a {@link AuthenticatedURL.Token} to be
-   * used by subsequent requests.
-   *
-   * @param url the URl to authenticate against.
-   * @param token the authentication token being used for the user.
-   *
-   * @throws IOException if an IO error occurred.
-   * @throws AuthenticationException if an authentication error occurred.
-   */
-  void authenticate(URL url, AuthenticatedURL.Token token) throws IOException, AuthenticationException;
-
-}
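
To make the contract above concrete, a minimal and purely illustrative implementation could
look like the sketch below; a real authenticator would negotiate with the server (e.g. via
SPNEGO) and populate the token:

    import java.io.IOException;
    import java.net.URL;

    public class NoopAuthenticator implements Authenticator {
        private ConnectionConfigurator configurator;

        @Override
        public void setConnectionConfigurator(ConnectionConfigurator configurator) {
            this.configurator = configurator;   // kept for configuring any connections opened later
        }

        @Override
        public void authenticate(URL url, AuthenticatedURL.Token token)
                throws IOException, AuthenticationException {
            // A real mechanism would contact 'url' here and fill in the token;
            // this sketch deliberately does nothing.
        }
    }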