Posted to commits@directory.apache.org by pl...@apache.org on 2017/11/15 05:12:06 UTC

[01/10] directory-kerby git commit: Add the HAS project to Kerby.

Repository: directory-kerby
Updated Branches:
  refs/heads/has 1e6d36497 -> be5805660


http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
new file mode 100644
index 0000000..322eafd
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
@@ -0,0 +1,61 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+
+public class AddPrincipalCmd extends HadminCmd {
+
+    public static final String USAGE = "Usage: add_principal [options] <principal-name>\n"
+            + "\toptions are:\n"
+            + "\t\t[-randkey]\n"
+            + "\t\t[-pw password]"
+            + "\tExample:\n"
+            + "\t\tadd_principal -pw mypassword alice\n";
+
+    public AddPrincipalCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        String clientPrincipal = items[items.length - 1];
+        if (!items[1].startsWith("-")) {
+            getHadmin().addPrincipal(clientPrincipal);
+        } else if (items[1].startsWith("-randkey")) {
+            getHadmin().addPrincipal(clientPrincipal);
+        } else if (items[1].startsWith("-pw")) {
+            String password = items[2];
+            getHadmin().addPrincipal(clientPrincipal, password);
+        } else {
+            System.err.println("add_principal cmd format error.");
+            System.err.println(USAGE);
+            return;
+        }
+        System.out.println("Success to add principal :" + clientPrincipal);
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
new file mode 100644
index 0000000..b38f2c7
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
@@ -0,0 +1,78 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+
+public class AddPrincipalsCmd extends HadminCmd {
+    private static final Logger LOG = LoggerFactory.getLogger(AddPrincipalsCmd.class);
+
+    private static final String USAGE = "\nUsage: create_principals [hostRoles-file]\n"
+            + "\t'hostRoles-file' is a file with a hostRoles json string like:\n"
+            + "\t\t{HOSTS: [ {\"name\":\"host1\",\"hostRoles\":\"HDFS\"}, "
+            + "{\"name\":\"host2\",\"hostRoles\":\"HDFS,HBASE\"} ] }\n"
+            + "\tExample:\n"
+            + "\t\tcreate_principals hostroles.txt\n";
+
+    public AddPrincipalsCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length != 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        File hostRoles = new File(items[1]);
+        if (!hostRoles.exists()) {
+            throw new HasException("HostRoles file is not exists.");
+        }
+        try (BufferedReader reader = new BufferedReader(new FileReader(hostRoles))) {
+            StringBuilder sb = new StringBuilder();
+            String tempString;
+            while ((tempString = reader.readLine()) != null) {
+                sb.append(tempString);
+            }
+            JSONArray hostArray = new JSONObject(sb.toString()).optJSONArray("HOSTS");
+            for (int i = 0; i < hostArray.length(); i++) {
+                JSONObject host = (JSONObject) hostArray.get(i);
+                String[] roles = host.getString("hostRoles").split(",");
+                for (String role : roles) {
+                    System.out.println(getHadmin().addPrincByRole(host.getString("name"),
+                            role.toUpperCase()));
+                }
+            }
+        } catch (Exception e) {
+            throw new HasException("Failed to execute creating principals, because : " + e.getMessage());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
new file mode 100644
index 0000000..98458ec
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
@@ -0,0 +1,80 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+
+import java.io.Console;
+import java.util.Scanner;
+
+public class DeletePrincipalCmd extends HadminCmd {
+
+    public static final String USAGE = "Usage: delete_principal <principal-name>\n"
+            + "\tExample:\n"
+            + "\t\tdelete_principal alice\n";
+
+    public DeletePrincipalCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+        String principal = items[items.length - 1];
+        String reply;
+        Console console = System.console();
+        String prompt = "Are you sure to delete the principal? (yes/no, YES/NO, y/n, Y/N) ";
+        if (console == null) {
+            System.out.println("Couldn't get Console instance, "
+                    + "maybe you're running this from within an IDE. "
+                    + "Use scanner to read password.");
+            Scanner scanner = new Scanner(System.in, "UTF-8");
+            reply = getReply(scanner, prompt);
+        } else {
+            reply = getReply(console, prompt);
+        }
+        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
+            getHadmin().deletePrincipal(principal);
+            System.out.println("Success to delete " + principal);
+        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
+            System.out.println("Principal \"" + principal + "\"  not deleted.");
+        } else {
+            System.err.println("Unknown request, fail to delete the principal.");
+            System.err.println(USAGE);
+        }
+    }
+
+    private String getReply(Scanner scanner, String prompt) {
+        System.out.println(prompt);
+        return scanner.nextLine().trim();
+    }
+
+    private String getReply(Console console, String prompt) {
+        console.printf(prompt);
+        String line = console.readLine();
+        return line;
+    }
+}
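
A note on the fallback above: System.console() returns null whenever the JVM is not attached to an interactive terminal (an IDE, a pipe), which is why this command, like RenamePrincipalCmd below, falls back to reading the confirmation from a Scanner over System.in.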

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
new file mode 100644
index 0000000..66eb5cb
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
@@ -0,0 +1,40 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+
+public class DisableConfigureCmd extends HadminCmd {
+
+    public static final String USAGE = "Usage: enable_configure\n"
+            + "\tExample:\n"
+            + "\t\tenable\n";
+
+    public DisableConfigureCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        getHadmin().setEnableOfConf("false");
+        System.out.println("Set conf disable.");
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
new file mode 100644
index 0000000..f40a6c6
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
@@ -0,0 +1,40 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+
+public class EnableConfigureCmd extends HadminCmd {
+
+    public static final String USAGE = "Usage: enable_configure\n"
+            + "\tExample:\n"
+            + "\t\tenable\n";
+
+    public EnableConfigureCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        getHadmin().setEnableOfConf("true");
+        System.out.println("Set conf enable.");
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
new file mode 100644
index 0000000..c5b130c
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
@@ -0,0 +1,57 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+import org.apache.hadoop.has.server.web.HostRoleType;
+
+import java.io.File;
+
+public class ExportKeytabsCmd extends HadminCmd {
+    private static final String USAGE = "\nUsage: export_keytabs <host> [role]\n"
+            + "\tExample:\n"
+            + "\t\texport_keytabs host1 HDFS\n";
+
+    public ExportKeytabsCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+        String host = items[1];
+        if (items.length >= 3) {
+            exportKeytab(host, items[2]);
+            return;
+        }
+        for (HostRoleType r : HostRoleType.values()) {
+            exportKeytab(host, r.getName());
+        }
+    }
+
+    public void exportKeytab(String host, String role) throws HasException {
+        File keytab = new File(role + "-" + host + ".keytab");
+        getHadmin().getKeytabByHostAndRole(host, role, keytab);
+    }
+}
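
With the naming scheme above, `export_keytabs host1 HDFS` writes `HDFS-host1.keytab` to the working directory; without a role argument, one keytab per known host role is exported for the host.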

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
new file mode 100644
index 0000000..ebaf07f
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
@@ -0,0 +1,36 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
+
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+
+public class GetHostRolesCmd extends HadminCmd {
+    private static final String USAGE = "Usage: get_hostroles\n"
+            + "\tExample:\n"
+            + "\t\tget_hostroles\n";
+
+    public GetHostRolesCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) {
+        getHadmin().getHostRoles();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
new file mode 100644
index 0000000..88612a8
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
@@ -0,0 +1,76 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+import org.apache.kerby.kerberos.kerb.identity.KrbIdentity;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+
+import java.util.Map;
+
+public class GetPrincipalCmd extends HadminCmd {
+    private static final String USAGE = "Usage: getprinc principalName\n"
+        + "\tExample:\n"
+        + "\t\tgetprinc hello@TEST.COM\"\n";
+
+    public GetPrincipalCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) {
+        if (items.length != 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        String princName = items[items.length - 1];
+        KrbIdentity identity = null;
+        try {
+            identity = getHadmin().getPrincipal(princName);
+        } catch (HasException e) {
+            System.err.println("Failed to get principal: " + princName + ". " + e.getMessage());
+        }
+
+        if (identity == null) {
+            System.err.println(princName + " doesn't exist\n");
+            System.err.println(USAGE);
+            return;
+        }
+
+        Map<EncryptionType, EncryptionKey> keys = identity.getKeys();
+
+        System.out.println(
+            "Principal: " + identity.getPrincipalName() + "\n"
+                + "Expiration date: " + identity.getExpireTime() + "\n"
+                + "Created time: " + identity.getCreatedTime() + "\n"
+                + "KDC flags: " + identity.getKdcFlags() + "\n"
+                + "Key version: " + identity.getKeyVersion() + "\n"
+                + "Number of keys: " + keys.size()
+        );
+
+        for (EncryptionType keyType : keys.keySet()) {
+            System.out.println("key: " + keyType);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java
new file mode 100644
index 0000000..95ce59f
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java
@@ -0,0 +1,42 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+
+public abstract class HadminCmd {
+
+    private LocalHasAdmin hadmin;
+
+    public HadminCmd(LocalHasAdmin hadmin) {
+        this.hadmin = hadmin;
+    }
+
+    protected LocalHasAdmin getHadmin() {
+        return hadmin;
+    }
+
+    /**
+     * Execute the hadmin command.
+     * @param input the tokenized command line to execute
+     * @throws HasException if the command fails
+     */
+    public abstract void execute(String[] input) throws HasException;
+}
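
All of the concrete commands in this commit follow this small template: the constructor captures a LocalHasAdmin and execute() does the work. For illustration only, a dispatcher over these classes could look like the sketch below; the HadminCmdDispatcher name and the way the LocalHasAdmin instance is obtained are assumptions, not part of this commit.

```java
package org.apache.hadoop.has.tool.server.hadmin.local.cmd;

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.has.common.HasException;
import org.apache.hadoop.has.server.admin.LocalHasAdmin;

// Hypothetical sketch: map shell-style command names to the HadminCmd
// subclasses added in this commit and delegate execution to them.
public class HadminCmdDispatcher {

    public static void dispatch(LocalHasAdmin hadmin, String[] items) throws HasException {
        Map<String, HadminCmd> commands = new HashMap<>();
        commands.put("add_principal", new AddPrincipalCmd(hadmin));
        commands.put("delete_principal", new DeletePrincipalCmd(hadmin));
        commands.put("list_principals", new ListPrincipalsCmd(hadmin));

        HadminCmd cmd = commands.get(items[0]);
        if (cmd == null) {
            System.err.println("Unknown command: " + items[0]);
            return;
        }
        cmd.execute(items); // each command validates its own arguments and prints USAGE on error
    }
}
```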

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
new file mode 100644
index 0000000..99e05e2
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
@@ -0,0 +1,91 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+
+import java.io.File;
+import java.util.List;
+
+public class KeytabAddCmd extends HadminCmd {
+    private static final String USAGE =
+        "Usage: ktadd [-k[eytab] keytab] [-q] [-e keysaltlist] [-norandkey] [principal | -glob princ-exp] [...]";
+
+    private static final String DEFAULT_KEYTAB_FILE_LOCATION = "/etc/krb5.keytab";
+
+    public KeytabAddCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) {
+
+        String principal = null;
+        String keytabFileLocation = null;
+        Boolean glob = false;
+
+        // Since items[0] is ktadd, the initial index is 1.
+        int index = 1;
+        while (index < items.length) {
+            String command = items[index];
+            if (command.equals("-k")) {
+                index++;
+                if (index >= items.length) {
+                    System.err.println(USAGE);
+                    return;
+                }
+                keytabFileLocation = items[index].trim();
+
+            } else if (command.equals("-glob")) {
+                glob = true;
+            } else if (!command.startsWith("-")) {
+                principal = command;
+            }
+            index++;
+        }
+
+        if (keytabFileLocation == null) {
+            keytabFileLocation = DEFAULT_KEYTAB_FILE_LOCATION;
+        }
+        File keytabFile = new File(keytabFileLocation);
+
+        if (principal == null) {
+            System.err.println((glob ? "princ-exp" : "principal") + " not specified!");
+            System.err.println(USAGE);
+            return;
+        }
+
+        try {
+            if (glob) {
+                List<String> principals = getHadmin().getPrincipals(principal);
+                if (principals.size() != 0) {
+                    getHadmin().exportKeytab(keytabFile, principals);
+                }
+            } else {
+                getHadmin().exportKeytab(keytabFile, principal);
+            }
+            System.out.println("Principal export to keytab file : " + keytabFile + " successful .");
+        } catch (HasException e) {
+            System.err.println("Principal \"" + principal + "\" fail to add entry to keytab."
+                    + e.getMessage());
+        }
+    }
+}
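
By way of example (the paths here are hypothetical): `ktadd -k /tmp/hdfs.keytab hdfs/host1@HADOOP.COM` exports the entries of a single principal, `ktadd -k /tmp/all.keytab -glob hdfs*` exports every principal matching the glob, and omitting `-k` writes to the default /etc/krb5.keytab.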

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
new file mode 100644
index 0000000..ef9e7f7
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
@@ -0,0 +1,63 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+
+import java.util.List;
+
+public class ListPrincipalsCmd extends HadminCmd {
+    private static final String USAGE = "Usage: list_principals [expression]\n"
+            + "\t'expression' is a shell-style glob expression that can contain the wild-card characters ?, *, and []."
+            + "\tExample:\n"
+            + "\t\tlist_principals [expression]\n";
+
+    public ListPrincipalsCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length > 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        List<String> principalLists = null;
+
+        if (items.length == 1) {
+            principalLists = getHadmin().getPrincipals();
+        } else {
+            //have expression
+            String exp = items[1];
+            principalLists = getHadmin().getPrincipals(exp);
+        }
+
+        if (principalLists.isEmpty() || (principalLists.size() == 1 && principalLists.get(0).isEmpty())) {
+            return;
+        }
+        System.out.println("Principals are listed:");
+        for (String principal : principalLists) {
+            System.out.println(principal);
+        }
+    }
+}
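
For instance, a bare `list_principals` prints every principal, while `list_principals hdfs*` (a hypothetical glob) restricts the listing to principals matching the expression.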

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
new file mode 100644
index 0000000..2c0ba20
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
@@ -0,0 +1,82 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+
+import java.io.Console;
+import java.util.Scanner;
+
+public class RenamePrincipalCmd extends HadminCmd {
+    public static final String USAGE = "Usage: rename_principal <old_principal_name>"
+            + " <new_principal_name>\n"
+            + "\tExample:\n"
+            + "\t\trename_principal alice bob\n";
+
+    public RenamePrincipalCmd(LocalHasAdmin hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 3) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        String oldPrincipalName = items[items.length - 2];
+        String newPrincipalName = items[items.length - 1];
+
+        String reply;
+        Console console = System.console();
+        String prompt = "Are you sure to rename the principal? (yes/no, YES/NO, y/n, Y/N) ";
+        if (console == null) {
+            System.out.println("Couldn't get Console instance, "
+                    + "maybe you're running this from within an IDE. "
+                    + "Use scanner to read password.");
+            Scanner scanner = new Scanner(System.in, "UTF-8");
+            reply = getReply(scanner, prompt);
+        } else {
+            reply = getReply(console, prompt);
+        }
+        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
+            getHadmin().renamePrincipal(oldPrincipalName, newPrincipalName);
+            System.out.println("Success to rename principal : \"" + oldPrincipalName
+                + "\" to \"" + newPrincipalName + "\".");
+        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
+            System.out.println("Principal \"" + oldPrincipalName + "\"  not renamed.");
+        } else {
+            System.err.println("Unknown request, fail to rename the principal.");
+            System.err.println(USAGE);
+        }
+    }
+
+    private String getReply(Scanner scanner, String prompt) {
+        System.out.println(prompt);
+        return scanner.nextLine().trim();
+    }
+
+    private String getReply(Console console, String prompt) {
+        console.printf(prompt);
+        String line = console.readLine();
+        return line;
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-tool/pom.xml b/has/has-tool/pom.xml
new file mode 100644
index 0000000..a43041a
--- /dev/null
+++ b/has/has-tool/pom.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>has-project</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+  </parent>
+
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>has-tool</artifactId>
+  <packaging>pom</packaging>
+  <description>HAS tool</description>
+  <name>HAS tool</name>
+
+  <modules>
+    <module>has-client-tool</module>
+    <module>has-server-tool</module>
+  </modules>
+
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/pom.xml
----------------------------------------------------------------------
diff --git a/has/pom.xml b/has/pom.xml
new file mode 100644
index 0000000..ad80711
--- /dev/null
+++ b/has/pom.xml
@@ -0,0 +1,128 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <parent>
+    <groupId>org.apache</groupId>
+    <artifactId>apache</artifactId>
+    <version>18</version>
+    <relativePath/>
+  </parent>
+
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>has-project</artifactId>
+  <version>1.0.0-SNAPSHOT</version>
+  <description>Hadoop Authentication Server</description>
+  <name>Hadoop Authentication Server</name>
+  <packaging>pom</packaging>
+
+  <modules>
+    <module>has-common</module>
+    <module>has-plugins</module>
+    <module>has-server</module>
+    <module>has-client</module>
+    <module>has-dist</module>
+    <module>has-tool</module>
+  </modules>
+
+  <properties>
+    <commons-codec.version>1.4</commons-codec.version>
+    <kerby.version>1.1.0-SNAPSHOT</kerby.version>
+    <slf4j.version>1.7.25</slf4j.version>
+    <buildtools.dir>${basedir}/build-tools</buildtools.dir>
+  </properties>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <configuration>
+          <source>1.8</source>
+          <target>1.8</target>
+        </configuration>
+      </plugin>
+
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <version>2.17</version>
+        <configuration>
+          <configLocation>${buildtools.dir}/has-checkstyle.xml</configLocation>
+          <includeTestSourceDirectory>true</includeTestSourceDirectory>
+          <encoding>UTF-8</encoding>
+          <failOnViolation>true</failOnViolation>
+        </configuration>
+        <executions>
+          <execution>
+            <id>validate</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>check</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>2.17</version>
+        <configuration>
+          <runOrder>alphabetical</runOrder>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <profiles>
+    <profile>
+      <id>nochecks</id>
+      <properties>
+        <pmd.skip>true</pmd.skip>
+        <checkstyle.skip>true</checkstyle.skip>
+      </properties>
+    </profile>
+    <profile>
+      <id>activate-buildtools-in-module</id>
+      <activation>
+        <file>
+          <exists>${basedir}/../build-tools/has-checkstyle.xml</exists>
+        </file>
+      </activation>
+      <properties>
+        <buildtools.dir>${basedir}/../build-tools</buildtools.dir>
+      </properties>
+    </profile>
+    <profile>
+      <id>activate-buildtools-in-submodule</id>
+      <activation>
+        <file>
+          <exists>${basedir}/../../build-tools/has-checkstyle.xml</exists>
+        </file>
+      </activation>
+      <properties>
+        <buildtools.dir>${basedir}/../../build-tools</buildtools.dir>
+      </properties>
+    </profile>
+  </profiles>
+
+</project>
+
+
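
Worth noting about the profiles above: `buildtools.dir` defaults to `${basedir}/build-tools` at the root and is rebased by the two file-activated profiles for modules one or two levels down, so the checkstyle configuration resolves from any nesting depth; the `nochecks` profile skips PMD and checkstyle entirely.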

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/hadoop/README.md
----------------------------------------------------------------------
diff --git a/has/supports/hadoop/README.md b/has/supports/hadoop/README.md
new file mode 100644
index 0000000..15f177c
--- /dev/null
+++ b/has/supports/hadoop/README.md
@@ -0,0 +1,339 @@
+Enable Hadoop
+================
+
+## 1. Build Hadoop
+
+### Apply the patch to hadoop-2.7.2 source code
+```
+git apply hadoop-2.7.2.patch
+```
+
+### Build
+```
+mvn package -Pdist,native -Dtar -DskipTests -Dmaven.javadoc.skip=true -Dcontainer-executor.conf.dir=/etc/hadoop/conf
+```
+
+### Redeploy Hadoop
+
+## 2. Distribute and configure keytab files
+
+### Create keytab and deploy krb5.conf and has-client.conf
+Please look at [How to start HAS](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/has-start.md) for details.
+
+### Distribute keytab files to the corresponding nodes.
+
+### Set permission of keytab files
+```
+# Keytab files should be read-only
+chmod 400 *.keytab
+```
+
+## 3. Update Hadoop configuration files
+ 
+### Update core-site.xml
+Add the following properties:
+```
+<property>
+  <name>hadoop.security.authorization</name>
+  <value>true</value>
+</property>
+<property>
+  <name>hadoop.security.authentication</name>
+  <value>kerberos</value>
+</property>
+<property>
+   <name>hadoop.security.authentication.use.has</name>
+   <value>true</value>
+</property>
+```
+
+### Update hdfs-site.xml
+Add the following properties:
+```
+<!-- General HDFS security config -->
+<property>
+  <name>dfs.block.access.token.enable</name>
+  <value>true</value>
+</property>
+
+<!-- NameNode security config -->
+<property>
+  <name>dfs.namenode.keytab.file</name>
+  <value>/etc/hadoop/conf/hdfs.keytab</value>
+</property>
+<property>
+  <name>dfs.namenode.kerberos.principal</name>
+  <value>hdfs/_HOST@HADOOP.COM</value>
+</property>
+<property>
+  <name>dfs.namenode.kerberos.internal.spnego.principal</name>
+  <value>HTTP/_HOST@HADOOP.COM</value>
+</property>
+<property>
+  <name>dfs.namenode.delegation.token.max-lifetime</name>
+  <value>604800000</value>
+  <description>The maximum lifetime in milliseconds for which a delegation token is valid.</description>
+</property>
+
+<!-- Secondary NameNode security config -->
+<property>
+  <name>dfs.secondary.namenode.keytab.file</name>
+  <value>/etc/hadoop/conf/hdfs.keytab</value>
+</property>
+<property>
+  <name>dfs.secondary.namenode.kerberos.principal</name>
+  <value>hdfs/_HOST@HADOOP.COM</value>
+</property>
+<property>
+  <name>dfs.secondary.namenode.kerberos.internal.spnego.principal</name>
+  <value>HTTP/_HOST@HADOOP.COM</value>
+</property>
+
+<!-- DataNode security config -->
+<property>
+  <name>dfs.datanode.data.dir.perm</name>
+  <value>700</value>
+</property>
+<property>
+  <name>dfs.datanode.keytab.file</name>
+  <value>/etc/hadoop/conf/hdfs.keytab</value>
+</property>
+<property>
+  <name>dfs.datanode.kerberos.principal</name>
+  <value>hdfs/_HOST@HADOOP.COM</value>
+</property>
+
+<!-- HTTPS config -->
+<property>
+  <name>dfs.http.policy</name>
+  <value>HTTPS_ONLY</value>
+</property>
+<property>
+  <name>dfs.data.transfer.protection</name>
+  <value>integrity</value>
+</property>
+<property>
+  <name>dfs.web.authentication.kerberos.keytab</name>
+  <value>/etc/hadoop/conf/hdfs.keytab</value>
+</property>
+<property>
+  <name>dfs.web.authentication.kerberos.principal</name>
+  <value>HTTP/_HOST@HADOOP.COM</value>
+</property>
+```
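+
+Note: Hadoop substitutes the `_HOST` token in these principal names with the node's own canonical hostname at service start-up, so the same configuration files can be distributed to every node.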
+
+### Configuration for HDFS HA
+
+> For normal configuration, please look at [HDFS High Availability](https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-hdfs/HDFSHighAvailabilityWithNFS.html)
+
+Add the following properties in hdfs-site.xml:
+```
+<property>
+  <name>dfs.journalnode.keytab.file</name>
+  <value>/etc/hadoop/conf/hdfs.keytab</value>
+</property>
+<property>
+  <name>dfs.journalnode.kerberos.principal</name>
+  <value>hdfs/_HOST@HADOOP.COM</value>
+</property>
+<property>
+  <name>dfs.journalnode.kerberos.internal.spnego.principal</name>
+  <value>HTTP/_HOST@HADOOP.COM</value>
+</property>
+```
+
+### Update yarn-site.xml
+Add the following properties:
+```
+<!-- ResourceManager security config -->
+<property>
+  <name>yarn.resourcemanager.keytab</name>
+  <value>/etc/hadoop/conf/yarn.keytab</value>
+</property>
+<property>
+  <name>yarn.resourcemanager.principal</name>
+  <value>yarn/_HOST@HADOOP.COM</value>
+</property>
+
+<!-- NodeManager security config -->
+<property>
+  <name>yarn.nodemanager.keytab</name>
+  <value>/etc/hadoop/conf/yarn.keytab</value>
+</property>
+<property>
+  <name>yarn.nodemanager.principal</name> 
+  <value>yarn/_HOST@HADOOP.COM</value>
+</property>
+
+<!-- HTTPS config -->
+<property>
+  <name>mapreduce.jobhistory.http.policy</name>
+  <value>HTTPS_ONLY</value>
+</property>
+
+<!-- Container executor config -->
+<property>
+  <name>yarn.nodemanager.container-executor.class</name>
+  <value>org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor</value>
+</property>
+<property>
+  <name>yarn.nodemanager.linux-container-executor.group</name>
+  <value>root</value>
+</property>
+
+<!-- Timeline service config, if timeline service enabled -->
+<property>
+  <name>yarn.timeline-service.principal</name>
+  <value>yarn/_HOST@HADOOP.COM</value>
+</property>
+
+<property>
+  <name>yarn.timeline-service.keytab</name>
+  <value>/etc/hadoop/conf/yarn.keytab</value>
+</property>
+
+<property>
+  <name>yarn.timeline-service.http-authentication.type</name>
+  <value>kerberos</value>
+</property>
+
+<property>
+  <name>yarn.timeline-service.http-authentication.kerberos.principal</name>
+  <value>HTTP/_HOST@HADOOP.COM</value>
+</property>
+
+<property>
+  <name>yarn.timeline-service.http-authentication.kerberos.keytab</name>
+  <value>/etc/hadoop/conf/hdfs.keytab</value>
+</property>
+
+<!-- Proxy server config, if web proxy server enabled -->
+<property>
+  <name>yarn.web-proxy.keytab</name>
+  <value>/etc/hadoop/conf/yarn.keytab</value>
+</property>
+
+<property>
+  <name>yarn.web-proxy.principal</name>
+  <value>yarn/_HOST@HADOOP.COM</value>
+</property>
+```
+
+### Update mapred-site.xml
+Add the following properties:
+```
+<!-- MapReduce security config -->
+<property>
+  <name>mapreduce.jobhistory.keytab</name>
+  <value>/etc/hadoop/conf/mapred.keytab</value>
+</property>
+<property>
+  <name>mapreduce.jobhistory.principal</name>
+  <value>mapred/_HOST@HADOOP.COM</value>
+</property>
+```
+
+### Create and configure ssl-server.xml
+```
+cd $HADOOP_HOME
+cp etc/hadoop/ssl-server.xml.example etc/hadoop/ssl-server.xml
+```
+
+Configure ssl-server.xml:
+Please look at [How to deploy https](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/deploy-https.md).
+
+## 4. Configure container-executor
+
+### Create and configure container-executor.cfg
+
+Example of container-executor.cfg:
+```
+#configured value of yarn.nodemanager.linux-container-executor.group
+yarn.nodemanager.linux-container-executor.group=root
+#comma separated list of users who can not run applications
+banned.users=bin
+#Prevent other super-users
+min.user.id=0
+#comma separated list of system users who CAN run applications
+allowed.system.users=root,nobody,impala,hive,hdfs,yarn
+```
+
+Set permission:
+```
+mv container-executor.cfg /etc/hadoop/conf
+# container-executor.cfg should be read-only
+chmod 400 container-executor.cfg
+```
+
+### Set permission of container-executor:
+```
+chmod 6050 container-executor
+# Test whether the configuration is correct
+container-executor --checksetup
+```
+
+## 5. Setting up cross-realm for distcp
+
+### Setup cross realm trust between realms
+Please look at [How to setup cross-realm](https://github.com/intel-bigdata/has/blob/cross-realm/doc/cross-realm.md).
+
+### Update core-site.xml
+
+Set the hadoop.security.auth_to_local parameter in both clusters by adding the following properties:
+```
+<!-- Set up cross realm between A.HADOOP.COM and B.HADOOP.COM -->
+<property>
+    <name>hadoop.security.auth_to_local</name>
+    <value> 
+        RULE:[1:$1@$0](.*@A.HADOOP.COM)s/@A.HADOOP.COM///L
+        RULE:[2:$1@$0](.*@A.HADOOP.COM)s/@A.HADOOP.COM///L
+        RULE:[1:$1@$0](.*@B.HADOOP.COM)s/@B.HADOOP.COM///L
+        RULE:[2:$1@$0](.*@B.HADOOP.COM)s/@B.HADOOP.COM///L
+    </value>
+</property>
+```
+
+For detailed mapping rules, please look at [Mapping Rule](https://www.cloudera.com/documentation/enterprise/5-9-x/topics/cdh_sg_kerbprin_to_sn.html).
+
+Test the mapping:
+```
+hadoop org.apache.hadoop.security.HadoopKerberosName hdfs/localhost@A.HADOOP.COM
+```
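+
+If the rules are in place, the command should print the mapped short name, here `hdfs` for `hdfs/localhost@A.HADOOP.COM`.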
+
+### Update hdfs-site.xml
+Add the following properties on the client side:
+```
+<!-- Control allowed realms to authenticate with -->
+<property>
+    <name>dfs.namenode.kerberos.principal.pattern</name>
+    <value>*</value>
+</property>
+```
+
+### Validate
+Test that the trust is set up by running HDFS commands from A.HADOOP.COM to B.HADOOP.COM. Run the following command on a node of the A.HADOOP.COM cluster:
+```
+hdfs dfs -ls hdfs://<NameNode_FQDN_for_B.HADOOP.COM_Cluster>:8020/
+```
+
+### Distcp between secure clusters
+
+Run the distcp command:
+```
+hadoop distcp hdfs://<Cluster_A_URI> hdfs://<Cluster_B_URI>
+```
+
+### Distcp between secure and insecure clusters
+
+Add the following properties in core-site.xml:
+```
+<property> 
+  <name>ipc.client.fallback-to-simple-auth-allowed</name>
+  <value>true</value>  
+</property>
+```
+
+Or run the distcp command with security setting:
+```
+hadoop distcp -D ipc.client.fallback-to-simple-auth-allowed=true hdfs://<Cluster_A_URI> hdfs://<Cluster_B_URI>
+```

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/hadoop/hadoop-2.7.2.patch
----------------------------------------------------------------------
diff --git a/has/supports/hadoop/hadoop-2.7.2.patch b/has/supports/hadoop/hadoop-2.7.2.patch
new file mode 100644
index 0000000..336a83d
--- /dev/null
+++ b/has/supports/hadoop/hadoop-2.7.2.patch
@@ -0,0 +1,152 @@
+diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/hadoop-auth/pom.xml
+index aa3c2c7..e4f1fd2 100644
+--- a/hadoop-common-project/hadoop-auth/pom.xml
++++ b/hadoop-common-project/hadoop-auth/pom.xml
+@@ -143,6 +143,11 @@
+       <artifactId>curator-test</artifactId>
+       <scope>test</scope>
+     </dependency>
++    <dependency>
++      <groupId>org.apache.hadoop</groupId>
++      <artifactId>has-client</artifactId>
++      <version>1.0.0-SNAPSHOT</version>
++    </dependency>
+   </dependencies>
+ 
+   <build>
+diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
+index f7f5f63..80b7aca 100644
+--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
++++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
+@@ -44,7 +44,8 @@
+   public static String getKrb5LoginModuleName() {
+     return System.getProperty("java.vendor").contains("IBM")
+       ? "com.ibm.security.auth.module.Krb5LoginModule"
+-      : "com.sun.security.auth.module.Krb5LoginModule";
++//      : "com.sun.security.auth.module.Krb5LoginModule";
++      :"org.apache.hadoop.has.client.HasLoginModule";
+   }
+   
+   public static Oid getOidInstance(String oidName) 
+diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+index 65e4166..f5224bb 100644
+--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
++++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+@@ -89,6 +89,8 @@
+   private static boolean shouldRenewImmediatelyForTests = false;
+   static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
+   static final String HADOOP_PROXY_USER = "HADOOP_PROXY_USER";
++  public static final String HADOOP_SECURITY_AUTHENTICATION_USE_HAS
++    = "hadoop.security.authentication.use.has";
+ 
+   /**
+    * For the purposes of unit tests, we want to test login
+@@ -460,6 +462,9 @@ public String toString() {
+       "hadoop-user-kerberos";
+     private static final String KEYTAB_KERBEROS_CONFIG_NAME = 
+       "hadoop-keytab-kerberos";
++    private static final String HAS_KERBEROS_CONFIG_NAME =
++      "hadoop-has-kerberos";
++
+ 
+     private static final Map<String, String> BASIC_JAAS_OPTIONS =
+       new HashMap<String,String>();
+@@ -516,6 +521,29 @@ public String toString() {
+       KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
+       KEYTAB_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);      
+     }
++
++    private static final Map<String, String> HAS_KERBEROS_OPTIONS =
++        new HashMap<String, String>();
++
++    static {
++      if (IBM_JAVA) {
++        HAS_KERBEROS_OPTIONS.put("useDefaultCcache", "true");
++      } else {
++        HAS_KERBEROS_OPTIONS.put("doNotPrompt", "true");
++        HAS_KERBEROS_OPTIONS.put("useTgtTicket", "true");
++        HAS_KERBEROS_OPTIONS.put("hadoopSecurityHas", conf.get("hadoop.security.has"));
++      }
++      HAS_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
++    }
++
++    private static final AppConfigurationEntry HAS_KERBEROS_LOGIN =
++      new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
++                                LoginModuleControlFlag.OPTIONAL,
++                                HAS_KERBEROS_OPTIONS);
++    private static final AppConfigurationEntry[] HAS_KERBEROS_CONF =
++      new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, HAS_KERBEROS_LOGIN,
++                                  HADOOP_LOGIN};
++
+     private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN =
+       new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
+                                 LoginModuleControlFlag.REQUIRED,
+@@ -546,6 +574,8 @@ public String toString() {
+         }
+         KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
+         return KEYTAB_KERBEROS_CONF;
++      } else if(HAS_KERBEROS_CONFIG_NAME.equals(appName)) {
++        return HAS_KERBEROS_CONF;
+       }
+       return null;
+     }
+@@ -792,9 +822,16 @@ static void loginUserFromSubject(Subject subject) throws IOException {
+       if (subject == null) {
+         subject = new Subject();
+       }
+-      LoginContext login =
+-          newLoginContext(authenticationMethod.getLoginAppName(), 
+-                          subject, new HadoopConfiguration());
++      LoginContext login = null;
++      if (authenticationMethod.equals(AuthenticationMethod.KERBEROS)
++        && conf.getBoolean(HADOOP_SECURITY_AUTHENTICATION_USE_HAS, false)) {
++        login = newLoginContext(HadoopConfiguration.HAS_KERBEROS_CONFIG_NAME,
++          subject, new HadoopConfiguration());
++      } else {
++        login = newLoginContext(authenticationMethod.getLoginAppName(),
++          subject, new HadoopConfiguration());
++      }
++
+       login.login();
+       UserGroupInformation realUser = new UserGroupInformation(subject);
+       realUser.setLogin(login);
+@@ -925,6 +962,39 @@ public void run() {
+       }
+     }
+   }
++
++  /**
++   * Log a user in from a tgt ticket.
++   * @throws IOException
++   */
++  @InterfaceAudience.Public
++  @InterfaceStability.Evolving
++  public synchronized
++  static void loginUserFromHas() throws IOException {
++    if (!isSecurityEnabled())
++      return;
++
++    Subject subject = new Subject();
++    LoginContext login;
++    long start = 0;
++    try {
++      login = newLoginContext(HadoopConfiguration.HAS_KERBEROS_CONFIG_NAME,
++            subject, new HadoopConfiguration());
++      start = Time.now();
++      login.login();
++      metrics.loginSuccess.add(Time.now() - start);
++      loginUser = new UserGroupInformation(subject);
++      loginUser.setLogin(login);
++      loginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
++    } catch (LoginException le) {
++      if (start > 0) {
++        metrics.loginFailure.add(Time.now() - start);
++      }
++      throw new IOException("Login failure for " + le, le);
++    }
++    LOG.info("Login successful for user " + loginUser.getUserName());
++  }
++
+   /**
+    * Log a user in from a keytab file. Loads a user identity from a keytab
+    * file and logs them in. They become the currently logged-in user.
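
A minimal sketch of how a client application might use the HAS login path added by this patch; `loginUserFromHas()`, `hadoop.security.authentication.use.has`, and `hadoop.security.has` come from the diff above, while the class name and configuration values here are illustrative assumptions:
```Java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class HasLoginExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Keys from the patch; values are placeholders for a real deployment.
        conf.set("hadoop.security.authentication", "kerberos");
        conf.setBoolean("hadoop.security.authentication.use.has", true);
        conf.set("hadoop.security.has", "https://localhost:8092/has"); // assumed address format
        UserGroupInformation.setConfiguration(conf);

        // Added by the patch: obtains a TGT through HAS and logs the user in.
        UserGroupInformation.loginUserFromHas();
        System.out.println("Logged in as: "
            + UserGroupInformation.getCurrentUser().getUserName());
    }
}
```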

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/hbase/README.md
----------------------------------------------------------------------
diff --git a/has/supports/hbase/README.md b/has/supports/hbase/README.md
new file mode 100644
index 0000000..d55a35c
--- /dev/null
+++ b/has/supports/hbase/README.md
@@ -0,0 +1,154 @@
+Enable HBase
+===============
+
+## 1. Apply the patch to hadoop-2.5.1 source code
+```
+git apply hbase-1.1.10-hadoop-2.5.1.patch
+```
+
+## 2. Build
+```
+mvn clean package -DskipTests
+```
+
+## 3. Copy the hadoop-auth jar and hadoop-common jar to hbase lib
+```
+cp hadoop/hadoop-common-project/hadoop-auth/target/hadoop-auth-2.5.1.jar $HBASE_HOME/lib/
+cp hadoop/hadoop-common-project/hadoop-common/target/hadoop-common-2.5.1.jar $HBASE_HOME/lib/
+```
+
+## 4. Update hbase security configuration
+
+### Update conf/hbase-site.xml
+```
+<property>
+  <name>hbase.security.authentication</name>
+  <value>kerberos</value> 
+</property>
+
+<property>
+  <name>hbase.rpc.engine</name>
+  <value>org.apache.hadoop.hbase.ipc.SecureRpcEngine</value>
+</property>
+
+<property> 
+  <name>hbase.regionserver.kerberos.principal</name> 
+  <value>hbase/_HOST@HADOOP.COM</value> 
+</property> 
+
+<property> 
+  <name>hbase.regionserver.keytab.file</name> 
+  <value>/path/to/hbase.keytab</value> 
+</property>
+
+<property> 
+  <name>hbase.master.kerberos.principal</name> 
+  <value>hbase/_HOST@HADOOP.COM</value> 
+</property> 
+
+<property> 
+  <name>hbase.master.keytab.file</name> 
+  <value>/path/to/hbase.keytab</value> 
+</property>
+```
+
+### Update /etc/hbase/conf/zk-jaas.conf
+```
+Client {
+      com.sun.security.auth.module.Krb5LoginModule required
+      useKeyTab=true
+      keyTab="/path/to/hbase.keytab"
+      storeKey=true
+      useTicketCache=false
+      principal="hbase/_HOST@HADOOP.COM";
+};
+```
+
+> Note "_HOST" should be replaced with the specific hostname.
+
+### Update conf/hbase-env.sh
+```
+export HBASE_OPTS="$HBASE_OPTS -Djava.security.auth.login.config=/etc/hbase/conf/zk-jaas.conf"
+export HBASE_MANAGES_ZK=false
+```
+
+### Update conf/hbase-site.xml on each HBase server host
+```
+<configuration>
+  <property>
+    <name>hbase.zookeeper.quorum</name>
+    <value>$ZK_NODES</value>
+  </property>
+   
+  <property>
+    <name>hbase.cluster.distributed</name>
+    <value>true</value>
+  </property>
+</configuration>
+```
+
+## 5. Update hadoop configuration to support JSVC instead of SASL
+
+### Install jsvc on each host of the hadoop cluster
+```
+sudo apt-get install jsvc
+```
+
+> Download commons-daemon-xxx.jar from http://archive.apache.org/dist/commons/daemon/binaries/
+
+```
+export CLASSPATH=$CLASSPATH:/path/to/commons-daemon-xxx.jar
+```
+
+### Update hadoop/etc/hadoop/hadoop-env.sh
+```
+export HADOOP_SECURE_DN_USER=root
+export HADOOP_SECURE_DN_PID_DIR=$HADOOP_HOME/$DN_USER/pids
+export HADOOP_SECURE_DN_LOG_DIR=$HADOOP_HOME/$DN_USER/logs
+
+export JSVC_HOME=/usr/bin
+```
+
+### Disable https in hadoop/etc/hadoop/hdfs-site.xml
+
+***REMOVE*** the following configurations:
+```
+<!-- HTTPS config -->
+<property>
+  <name>dfs.http.policy</name>
+  <value>HTTPS_ONLY</value>
+</property>
+<property>
+  <name>dfs.data.transfer.protection</name>
+  <value>integrity</value>
+</property>
+```
+
+### Update hadoop/etc/hadoop/hdfs-site.xml
+```
+<property>
+    <name>dfs.datanode.address</name>
+    <value>0.0.0.0:1004</value> 
+</property>
+<property>
+    <name>dfs.datanode.http.address</name>
+    <value>0.0.0.0:1006</value>
+</property>
+```
+
+> The secure datanode must use privileged ports (below 1024).
+
+## 6. Start hbase
+
+### Restart namenode and datanode with jsvc
+```
+sbin/stop-dfs.sh   # stop hdfs first
+
+sbin/hadoop-daemon.sh start namenode   # start namenode
+sbin/start-secure-dns.sh   # start datanode in secure mode
+```
+
+### Start hbase
+```
+bin/start-hbase.sh
+```
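+
+### Verify with a Java client
+A minimal sketch for checking the secure setup from code, assuming the HBase 1.1 client API and the principal/keytab configured above; hostnames and paths are placeholders:
+```Java
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.security.UserGroupInformation;
+
+public class SecureHBaseCheck {
+    public static void main(String[] args) throws Exception {
+        Configuration conf = HBaseConfiguration.create();
+        conf.set("hadoop.security.authentication", "kerberos");
+        conf.set("hbase.security.authentication", "kerberos");
+        UserGroupInformation.setConfiguration(conf);
+        // Placeholder principal and keytab; use the ones created for the cluster.
+        UserGroupInformation.loginUserFromKeytab(
+                "hbase/localhost@HADOOP.COM", "/path/to/hbase.keytab");
+        try (Connection connection = ConnectionFactory.createConnection(conf);
+             Admin admin = connection.getAdmin()) {
+            System.out.println("Cluster id: "
+                + admin.getClusterStatus().getClusterId());
+        }
+    }
+}
+```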

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
----------------------------------------------------------------------
diff --git a/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch b/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
new file mode 100644
index 0000000..bef04b4
--- /dev/null
+++ b/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
@@ -0,0 +1,136 @@
+diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
+index ca0fce2..b43476d 100644
+--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
++++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
+@@ -44,7 +44,8 @@
+   public static String getKrb5LoginModuleName() {
+     return System.getProperty("java.vendor").contains("IBM")
+       ? "com.ibm.security.auth.module.Krb5LoginModule"
+-      : "com.sun.security.auth.module.Krb5LoginModule";
++//      : "com.sun.security.auth.module.Krb5LoginModule";
++      :"org.apache.hadoop.has.client.HasLoginModule";
+   }
+   
+   public static Oid getOidInstance(String oidName) 
+diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+index 4f117fd..7a8fc43 100644
+--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
++++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+@@ -88,8 +88,10 @@
+   private static final float TICKET_RENEW_WINDOW = 0.80f;
+   static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
+   static final String HADOOP_PROXY_USER = "HADOOP_PROXY_USER";
+-  
+-  /** 
++  public static final String HADOOP_SECURITY_AUTHENTICATION_USE_HAS
++    = "hadoop.security.authentication.use.has";
++
++  /**
+    * UgiMetrics maintains UGI activity statistics
+    * and publishes them through the metrics interfaces.
+    */
+@@ -434,6 +436,8 @@ public String toString() {
+       "hadoop-user-kerberos";
+     private static final String KEYTAB_KERBEROS_CONFIG_NAME = 
+       "hadoop-keytab-kerberos";
++     private static final String HAS_KERBEROS_CONFIG_NAME =
++      "hadoop-has-kerberos";
+ 
+     private static final Map<String, String> BASIC_JAAS_OPTIONS =
+       new HashMap<String,String>();
+@@ -490,6 +494,29 @@ public String toString() {
+       KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
+       KEYTAB_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);      
+     }
++
++    private static final Map<String, String> HAS_KERBEROS_OPTIONS =
++        new HashMap<String, String>();
++
++    static {
++      if (IBM_JAVA) {
++        HAS_KERBEROS_OPTIONS.put("useDefaultCcache", "true");
++      } else {
++        HAS_KERBEROS_OPTIONS.put("doNotPrompt", "true");
++        HAS_KERBEROS_OPTIONS.put("useTgtTicket", "true");
++        HAS_KERBEROS_OPTIONS.put("hadoopSecurityHas", conf.get("hadoop.security.has"));
++      }
++      HAS_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
++    }
++
++    private static final AppConfigurationEntry HAS_KERBEROS_LOGIN =
++      new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
++                                LoginModuleControlFlag.OPTIONAL,
++                                HAS_KERBEROS_OPTIONS);
++    private static final AppConfigurationEntry[] HAS_KERBEROS_CONF =
++      new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, HAS_KERBEROS_LOGIN,
++                                  HADOOP_LOGIN};
++
+     private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN =
+       new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
+                                 LoginModuleControlFlag.REQUIRED,
+@@ -520,11 +547,45 @@ public String toString() {
+         }
+         KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
+         return KEYTAB_KERBEROS_CONF;
++      } else if(HAS_KERBEROS_CONFIG_NAME.equals(appName)) {
++        return HAS_KERBEROS_CONF;
+       }
+       return null;
+     }
+   }
+ 
++  /**
++   * Log a user in from a tgt ticket.
++   * @throws IOException
++   */
++  @InterfaceAudience.Public
++  @InterfaceStability.Evolving
++  public synchronized
++  static void loginUserFromHas() throws IOException {
++    if (!isSecurityEnabled())
++      return;
++
++    Subject subject = new Subject();
++    LoginContext login;
++    long start = 0;
++    try {
++      login = newLoginContext(HadoopConfiguration.HAS_KERBEROS_CONFIG_NAME,
++            subject, new HadoopConfiguration());
++      start = Time.now();
++      login.login();
++      metrics.loginSuccess.add(Time.now() - start);
++      loginUser = new UserGroupInformation(subject);
++      loginUser.setLogin(login);
++      loginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
++    } catch (LoginException le) {
++      if (start > 0) {
++        metrics.loginFailure.add(Time.now() - start);
++      }
++      throw new IOException("Login failure for " + le, le);
++    }
++    LOG.info("Login successful for user " + loginUser.getUserName());
++  }
++
+   private static String prependFileAuthority(String keytabPath) {
+     return keytabPath.startsWith("file://") ? keytabPath
+         : "file://" + keytabPath;
+@@ -751,9 +812,16 @@ static void loginUserFromSubject(Subject subject) throws IOException {
+       if (subject == null) {
+         subject = new Subject();
+       }
+-      LoginContext login =
+-          newLoginContext(authenticationMethod.getLoginAppName(), 
+-                          subject, new HadoopConfiguration());
++      LoginContext login = null;
++      if (authenticationMethod.equals(AuthenticationMethod.KERBEROS)
++        && conf.getBoolean(HADOOP_SECURITY_AUTHENTICATION_USE_HAS, false)) {
++        login = newLoginContext(HadoopConfiguration.HAS_KERBEROS_CONFIG_NAME,
++          subject, new HadoopConfiguration());
++      } else {
++        login = newLoginContext(authenticationMethod.getLoginAppName(),
++          subject, new HadoopConfiguration());
++      }
++
+       login.login();
+       UserGroupInformation realUser = new UserGroupInformation(subject);
+       realUser.setLogin(login);

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/hive/README.md
----------------------------------------------------------------------
diff --git a/has/supports/hive/README.md b/has/supports/hive/README.md
new file mode 100644
index 0000000..2fa1195
--- /dev/null
+++ b/has/supports/hive/README.md
@@ -0,0 +1,55 @@
+Enable Hive
+==============
+
+## Hive on HDFS
+
+### 1. Enable Kerberos authentication for HiveServer2
+> Update hive-site.xml
+```
+<property>
+  <name>hive.server2.authentication</name>
+  <value>KERBEROS</value>
+</property>
+<property>
+  <name>hive.server2.authentication.kerberos.principal</name>
+  <value>hive/_HOST@HADOOP.COM</value>
+</property>
+<property>
+  <name>hive.server2.authentication.kerberos.keytab</name>
+  <value>/path/to/hive.keytab</value>
+</property>
+```
+
+### 2. Enable impersonation in HiveServer2
+> Update hive-site.xml
+```
+<property>
+  <name>hive.server2.enable.impersonation</name>
+  <description>Enable user impersonation for HiveServer2</description>
+  <value>true</value>
+</property>
+```
+
+> Update core-site.xml of hadoop
+```
+<property>
+  <name>hadoop.proxyuser.hive.hosts</name>
+  <value>*</value>
+</property>
+<property>
+  <name>hadoop.proxyuser.hive.groups</name>
+  <value>*</value>
+</property>
+```
+
+### 3. Start Hive
+> Start the services
+```
+hive --service metastore &
+hive --service hiveserver2 &
+```
+
+> Start the hive shell
+```
+hive
+```
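+
+### Connect with JDBC
+A minimal sketch of connecting to the Kerberized HiveServer2 over JDBC, assuming the Hive JDBC driver is on the classpath and a ticket was obtained with kinit; host and principal are placeholders:
+```Java
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.Statement;
+
+public class HiveJdbcCheck {
+    public static void main(String[] args) throws Exception {
+        // The principal in the URL is HiveServer2's service principal.
+        String url = "jdbc:hive2://localhost:10000/default;"
+                + "principal=hive/_HOST@HADOOP.COM";
+        try (Connection conn = DriverManager.getConnection(url);
+             Statement stmt = conn.createStatement();
+             ResultSet rs = stmt.executeQuery("show databases")) {
+            while (rs.next()) {
+                System.out.println(rs.getString(1));
+            }
+        }
+    }
+}
+```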

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/oozie/README.md
----------------------------------------------------------------------
diff --git a/has/supports/oozie/README.md b/has/supports/oozie/README.md
new file mode 100644
index 0000000..4760f97
--- /dev/null
+++ b/has/supports/oozie/README.md
@@ -0,0 +1,105 @@
+Enable Oozie
+===============
+
+## 1. Update oozie-site.xml
+Add the following properties:
+```
+<property>
+  <name>oozie.service.AuthorizationService.security.enabled</name>
+  <value>true</value>
+  <description>Specifies whether security (user name/admin role) is enabled or not.
+   If it is disabled any user can manage the Oozie system and manage any job.</description>
+</property>
+
+<property>
+  <name>oozie.service.HadoopAccessorService.kerberos.enabled</name>
+  <value>true</value>
+</property>
+
+<property>
+  <name>local.realm</name>
+  <value>HADOOP.COM</value>
+  <description>HAS Realm.</description>
+</property>
+
+<property>
+  <name>oozie.service.HadoopAccessorService.keytab.file</name>
+  <value>/etc/oozie/conf/oozie.keytab</value>
+  <description>The keytab of the Oozie service.</description>
+</property>
+
+<property>
+  <name>oozie.service.HadoopAccessorService.kerberos.principal</name>
+  <value>oozie/_HOST@HADOOP.COM</value>
+  <description>Principal of Oozie service.</description>
+</property>
+
+<property>
+  <name>oozie.authentication.kerberos.principal</name>
+  <value>HTTP/_HOST@HADOOP.COM</value>
+  <description>Must use the hostname of the Oozie Server.</description>
+</property>
+
+<property>
+  <name>oozie.authentication.kerberos.keytab</name>
+  <value>/etc/hadoop/conf/hdfs.keytab</value>
+  <description>Location of the hdfs keytab file which contains the HTTP principal.</description>
+</property>
+
+<property>
+  <name>oozie.authentication.type</name>
+  <value>kerberos</value>
+  <description></description>
+</property>
+
+<property>
+  <name>oozie.authentication.kerberos.name.rules</name>
+  <value>DEFAULT</value>
+  <description>The mapping from principal names to local service user names.</description>
+</property>
+```
+
+> Note "_HOST" should be replaced with the specific hostname.
+
+## 2. Start oozie
+```
+bin/oozied.sh start
+```
+
+## 3. Use kinit to get a credential cache
+
+## 4. Use the Oozie command line tool to check the status of Oozie:
+```
+bin/oozie.sh admin -oozie http://<host>:11000/oozie -status
+```
+
+The command returns:
+```
+System mode: NORMAL
+```
+
+## 5. Use curl to check the status of Oozie:
+```
+curl -i --negotiate -u : "http://<host>:11000/oozie/v1/admin/status"
+```
+
+The response shows the initial 401 Negotiate challenge followed by the authenticated 200 response:
+```
+HTTP/1.1 401 Unauthorized
+Server: Apache-Coyote/1.1
+WWW-Authenticate: Negotiate
+Set-Cookie: hadoop.auth=; Path=/; Expires=Thu, 01-Jan-1970 00:00:00 GMT; HttpOnly
+Content-Type: text/html;charset=utf-8
+Content-Length: 997
+Date: Wed, 28 Jun 2017 03:45:28 GMT
+
+HTTP/1.1 200 OK
+Server: Apache-Coyote/1.1
+WWW-Authenticate: Negotiate YGoGCSqGSIb3EgECAgIAb1swWaADAgEFoQMCAQ+iTTBLoAMCARGiRARCzCqLa8uqKUk6UlJfN02KC79DDFpStTBieqHBfhYEm6S1GyrP29Sr3hC4lYl4U42NFSwTb/ySjqu3EpOhBJo5Bg4h
+Set-Cookie: hadoop.auth="u=oozie&p=oozie/_HOST@EXAMPLE.COM&t=kerberos&e=1498657528799&s=waJ0DZ80kcA2Gc9pYMNIGsIAC5Y="; Path=/; Expires=Wed, 28-Jun-2017 13:45:28 GMT; HttpOnly
+Content-Type: application/json;charset=UTF-8
+Content-Length: 23
+Date: Wed, 28 Jun 2017 03:45:28 GMT
+
+{"systemMode":"NORMAL"}
+```
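+
+## 6. Check the status from Java
+A minimal sketch using the Oozie Java client, assuming oozie-client is on the classpath and a Kerberos ticket is in the credential cache; the URL is a placeholder:
+```Java
+import org.apache.oozie.client.AuthOozieClient;
+
+public class OozieStatusCheck {
+    public static void main(String[] args) throws Exception {
+        // AuthOozieClient authenticates via SPNEGO using the ticket cache.
+        AuthOozieClient client =
+                new AuthOozieClient("http://localhost:11000/oozie", "kerberos");
+        System.out.println("System mode: " + client.getSystemMode());
+    }
+}
+```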

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/phoenix/README.md
----------------------------------------------------------------------
diff --git a/has/supports/phoenix/README.md b/has/supports/phoenix/README.md
new file mode 100644
index 0000000..05755fb
--- /dev/null
+++ b/has/supports/phoenix/README.md
@@ -0,0 +1,30 @@
+Enable Phoenix
+=================
+
+## 1. Use sqlline to connect to secure HBase
+```
+sqlline.py <zk_quorum>:<zk_port>:<zk_hbase_path>:<principal>:<keytab_file>
+# An example:
+sqlline.py localhost:2181:/hbase:hbase/localhost@EXAMPLE.COM:/home/hadoop/keytab/hbase.keytab
+```
+
+## 2. Configuring phoenix query server
+
+### Update hbase-site.xml
+Add the following properties:
+```
+<property>
+    <name>phoenix.queryserver.kerberos.principal</name>
+    <value>hbase/_HOST@HADOOP.COM</value>
+</property>
+
+<property>
+    <name>phoenix.queryserver.keytab.file</name>
+    <value>/home/hadoop/keytab/hbase.keytab</value>
+</property>
+```
+
+### Start phoenix query server
+```
+queryserver.py start
+```
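+
+### Connect with JDBC
+A minimal sketch of the same secure connection over JDBC, assuming the Phoenix client jar is on the classpath; the URL mirrors the sqlline.py arguments above:
+```Java
+import java.sql.Connection;
+import java.sql.DriverManager;
+
+public class PhoenixJdbcCheck {
+    public static void main(String[] args) throws Exception {
+        // zk_quorum:zk_port:zk_hbase_path:principal:keytab, as with sqlline.py.
+        String url = "jdbc:phoenix:localhost:2181:/hbase:"
+                + "hbase/localhost@EXAMPLE.COM:/home/hadoop/keytab/hbase.keytab";
+        try (Connection conn = DriverManager.getConnection(url)) {
+            System.out.println("Connected: " + !conn.isClosed());
+        }
+    }
+}
+```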

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/presto/README.md
----------------------------------------------------------------------
diff --git a/has/supports/presto/README.md b/has/supports/presto/README.md
new file mode 100644
index 0000000..244efe6
--- /dev/null
+++ b/has/supports/presto/README.md
@@ -0,0 +1,24 @@
+Enable Presto
+================
+
+## 1. Hive Security Configuration
+Update catalog/hive.properties and add the following properties:
+```
+# Config to connect to a Kerberized Hive metastore
+hive.metastore.authentication.type=KERBEROS
+hive.metastore.service.principal=hbase/_HOST@HADOOP.COM
+hive.metastore.client.principal=hbase/_HOST@HADOOP.COM
+hive.metastore.client.keytab=/path/to/hbase.keytab
+
+# Config to connect to Kerberized HDFS
+hive.hdfs.authentication.type=KERBEROS
+hive.hdfs.presto.principal=hbase/_HOST@HADOOP.COM
+hive.hdfs.presto.keytab=/path/to/hbase.keytab
+```
+
+> Note "_HOST" should be replaced with the specific hostname.
+
+## 2. Restart presto server
+```
+/bin/launcher restart
+```

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/spark/README.md
----------------------------------------------------------------------
diff --git a/has/supports/spark/README.md b/has/supports/spark/README.md
new file mode 100644
index 0000000..f08ce50
--- /dev/null
+++ b/has/supports/spark/README.md
@@ -0,0 +1,26 @@
+Enable Spark
+===============
+
+## 1. Update spark-env.sh
+```
+SPARK_HISTORY_OPTS="-Dspark.history.kerberos.enabled=true \
+-Dspark.history.kerberos.principal=<sp...@HADOOP.COM> \
+-Dspark.history.kerberos.keytab=<keytab>"
+```
+
+> Note "_HOST" should be replaced with the specific hostname.
+
+## 2. Spark-submit job
+> Only YARN mode is supported
+```
+/bin/spark-submit \
+  --keytab <keytab> \
+  --principal <sp...@HADOOP.COM> \
+  --class <main-class> \
+  --master <master-url> \
+  --deploy-mode <deploy-mode> \
+  --conf <key>=<value> \
+  ... # other options
+  <application-jar> \
+  <application-arguments>
+```

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/spark/spark-v2.1.1.patch
----------------------------------------------------------------------
diff --git a/has/supports/spark/spark-v2.1.1.patch b/has/supports/spark/spark-v2.1.1.patch
new file mode 100644
index 0000000..c7e40b7
--- /dev/null
+++ b/has/supports/spark/spark-v2.1.1.patch
@@ -0,0 +1,51 @@
+diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+index 443f1f5..1fc66f0 100644
+--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
++++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+@@ -553,7 +553,9 @@ object SparkSubmit {
+ 
+     // assure a keytab is available from any place in a JVM
+     if (clusterManager == YARN || clusterManager == LOCAL) {
+-      if (args.principal != null) {
++      if (args.useHas) {
++        UserGroupInformation.loginUserFromHas()
++      } else if (args.principal != null) {
+         require(args.keytab != null, "Keytab must be specified when principal is specified")
+         if (!new File(args.keytab).exists()) {
+           throw new SparkException(s"Keytab file: ${args.keytab} does not exist")
+diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+index f1761e7..5e48419 100644
+--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
++++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+@@ -78,6 +78,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
+   var submissionToRequestStatusFor: String = null
+   var useRest: Boolean = true // used internally
+ 
++  var useHas: Boolean = false
++
+   /** Default properties present in the currently defined defaults file. */
+   lazy val defaultSparkProperties: HashMap[String, String] = {
+     val defaultProperties = new HashMap[String, String]()
+@@ -438,6 +440,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
+       case USAGE_ERROR =>
+         printUsageAndExit(1)
+ 
++      case USE_HAS =>
++        useHas = true
++
+       case _ =>
+         throw new IllegalArgumentException(s"Unexpected argument '$opt'.")
+     }
+diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
+index 6767cc5..49a7678 100644
+--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
++++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
+@@ -76,6 +76,8 @@ class SparkSubmitOptionParser {
+   protected final String PRINCIPAL = "--principal";
+   protected final String QUEUE = "--queue";
+ 
++  protected final String USE_HAS = "--use-has";
++
+   /**
+    * This is the canonical list of spark-submit options. Each entry in the array contains the
+    * different aliases for the same option; the first element of each entry is the "official"

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/thrift/README.md
----------------------------------------------------------------------
diff --git a/has/supports/thrift/README.md b/has/supports/thrift/README.md
new file mode 100644
index 0000000..db49d38
--- /dev/null
+++ b/has/supports/thrift/README.md
@@ -0,0 +1,70 @@
+Enable Thrift
+================
+
+## 1. Enable HBase thrift2 server
+
+### Update hbase-site.xml
+Add the following properties:
+```
+<property>
+  <name>hbase.thrift.keytab.file</name>
+  <value>/etc/hbase/conf/hbase.keytab</value>
+</property>
+<property>
+  <name>hbase.thrift.kerberos.principal</name>
+  <value>hbase/_HOST@HADOOP.COM</value>
+</property>
+```
+
+### Restart HBase
+
+### Start thrift server
+```
+hbase thrift2 start
+```
+
+## 2. Write a thrift client application
+Use the keytab file to connect to the thrift server.
+An example of a thrift client:
+```Java
+package com.example.thrifttest;
+
+import org.apache.hadoop.hbase.thrift.generated.Hbase;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+import java.io.IOException;
+
+public class Thrifttest {
+    static { 
+        final String principal = "hbase/hostname@HADOOP.COM";
+        final String keyTab = "/etc/hbase/conf/hbase.keytab";
+        try {
+            UserGroupInformation.loginUserFromKeytab(principal, keyTab);
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private void start() {
+        try {
+            TTransport socket = new TSocket("192.168.x.xxx", 9090);
+            socket.open();   // open the transport before using the client
+            TProtocol protocol = new TBinaryProtocol(socket, true, true);
+            Hbase.Client client = new Hbase.Client(protocol);
+            socket.close();
+        } catch (TTransportException e) {
+            e.printStackTrace();
+        } catch (TException e) {
+            e.printStackTrace();
+        }
+    }
+
+    public static void main(String[] args) {
+        Thrifttest c = new Thrifttest();
+        c.start();
+    }
+}
+```

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/zookeeper/README.md
----------------------------------------------------------------------
diff --git a/has/supports/zookeeper/README.md b/has/supports/zookeeper/README.md
new file mode 100644
index 0000000..edc7a0e
--- /dev/null
+++ b/has/supports/zookeeper/README.md
@@ -0,0 +1,59 @@
+Enable ZooKeeper
+===================
+
+## 1. Create the dependency jars
+```
+cd HAS/supports/zookeeper
+mvn clean package
+```
+
+## 2. Copy the jars to ZooKeeper lib directory
+```
+cp HAS/supports/zookeeper/lib/* $ZOOKEEPER_HOME/lib/
+```
+
+## 3. Copy the conf file to ZooKeeper conf directory
+```
+cp HAS/supports/zookeeper/conf/* $ZOOKEEPER_HOME/conf/
+```
+
+## 4. Update Zookeeper security configuration files
+> Update $ZOO_CONF_DIR/jaas.conf
+> Replace "_HOST" with the specific hostname for each host
+```
+Server {
+  com.sun.security.auth.module.Krb5LoginModule required
+  useKeyTab=true
+  keyTab="/path/to/zookeeper.keytab"
+  storeKey=true
+  useTicketCache=true
+  principal="zookeeper/_HOST@HADOOP.COM";
+};
+
+Client {
+  com.sun.security.auth.module.Krb5LoginModule required
+  useKeyTab=true
+  keyTab="/home/hdfs/keytab/hbase.keytab"
+  storeKey=true
+  useTicketCache=false
+  principal="zookeeper/_HOST@HADOOP.COM";
+};
+```
+
+> Update conf/zoo.cfg
+```
+authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider
+jaasLoginRenew=3600000
+kerberos.removeHostFromPrincipal=true
+kerberos.removeRealmFromPrincipal=true
+```
+
+## 5. Verify the configuration
+```
+zkCli.sh -server hostname:port
+create /znode1 data sasl:zookeeper:cdrwa
+getAcl /znode1
+```
+
+> The result of getAcl should show that the proper scheme and permissions were applied to the znode,
+> e.g.: 'sasl,'zookeeper
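+
+A minimal Java sketch of the same verification, assuming the zookeeper client jar and the jaas.conf above (passed via -Djava.security.auth.login.config); host and znode names are placeholders:
+```Java
+import java.util.Collections;
+import java.util.concurrent.CountDownLatch;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.Watcher.Event.KeeperState;
+import org.apache.zookeeper.ZooDefs.Perms;
+import org.apache.zookeeper.ZooKeeper;
+import org.apache.zookeeper.data.ACL;
+import org.apache.zookeeper.data.Id;
+
+public class ZkSaslCheck {
+    public static void main(String[] args) throws Exception {
+        CountDownLatch connected = new CountDownLatch(1);
+        ZooKeeper zk = new ZooKeeper("localhost:2181", 30000, event -> {
+            if (event.getState() == KeeperState.SyncConnected) {
+                connected.countDown();
+            }
+        });
+        connected.await();
+        // Grant all permissions to the SASL-authenticated "zookeeper" identity.
+        ACL acl = new ACL(Perms.ALL, new Id("sasl", "zookeeper"));
+        zk.create("/znode1", "data".getBytes(),
+                Collections.singletonList(acl), CreateMode.PERSISTENT);
+        System.out.println(zk.getACL("/znode1", null));
+        zk.close();
+    }
+}
+```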

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/zookeeper/conf/jaas.conf
----------------------------------------------------------------------
diff --git a/has/supports/zookeeper/conf/jaas.conf b/has/supports/zookeeper/conf/jaas.conf
new file mode 100644
index 0000000..62db69a
--- /dev/null
+++ b/has/supports/zookeeper/conf/jaas.conf
@@ -0,0 +1,13 @@
+ Server {
+      com.sun.security.auth.module.Krb5LoginModule required
+      useKeyTab=true
+      keyTab="/etc/zookeeper/zookeeper.keytab"
+      storeKey=true
+      useTicketCache=true
+      principal="zookeeper/localhost@HADOOP.COM";
+  };
+
+Client {
+  org.apache.hadoop.has.client.HasLoginModule required
+  useTgtTicket=true;
+};

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/zookeeper/conf/java.env
----------------------------------------------------------------------
diff --git a/has/supports/zookeeper/conf/java.env b/has/supports/zookeeper/conf/java.env
new file mode 100644
index 0000000..bb7098b
--- /dev/null
+++ b/has/supports/zookeeper/conf/java.env
@@ -0,0 +1 @@
+export JVMFLAGS="-Djava.security.auth.login.config=$ZOOKEEPER_HOME/conf/jaas.conf"

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/zookeeper/pom.xml
----------------------------------------------------------------------
diff --git a/has/supports/zookeeper/pom.xml b/has/supports/zookeeper/pom.xml
new file mode 100644
index 0000000..d2cdc13
--- /dev/null
+++ b/has/supports/zookeeper/pom.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>has-project</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>zookeeper-dist</artifactId>
+  <description>ZooKeeper dist</description>
+  <name>ZooKeeper dist</name>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>has-client</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>lib</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+</project>


[10/10] directory-kerby git commit: Add the HAS project to Kerby.

Posted by pl...@apache.org.
Add the HAS project to Kerby.


Project: http://git-wip-us.apache.org/repos/asf/directory-kerby/repo
Commit: http://git-wip-us.apache.org/repos/asf/directory-kerby/commit/be580566
Tree: http://git-wip-us.apache.org/repos/asf/directory-kerby/tree/be580566
Diff: http://git-wip-us.apache.org/repos/asf/directory-kerby/diff/be580566

Branch: refs/heads/has
Commit: be58056601b3c9b0ebf070899d53ce325071c635
Parents: 1e6d364
Author: plusplusjiajia <ji...@intel.com>
Authored: Wed Nov 15 13:12:22 2017 +0800
Committer: plusplusjiajia <ji...@intel.com>
Committed: Wed Nov 15 13:12:22 2017 +0800

----------------------------------------------------------------------
 has/LICENSE                                     | 201 ++++++
 has/README.md                                   |  93 +++
 has/build-tools/has-checkstyle.xml              | 150 ++++
 has/build-tools/has-pmd-ruleset.xml             |  39 ++
 has/doc/cross-realm.md                          |  73 ++
 has/doc/deploy-https.md                         | 152 ++++
 has/doc/deploy-spnego.md                        |  13 +
 has/doc/has-design.png                          | Bin 0 -> 70653 bytes
 has/doc/has-ha.md                               | 124 ++++
 has/doc/has-overall.png                         | Bin 0 -> 74116 bytes
 has/doc/has-start.md                            | 183 +++++
 has/doc/mysql-backend.md                        |  23 +
 has/doc/performance-report.md                   |  98 +++
 has/doc/rest-api.md                             | 229 ++++++
 has/has-client/pom.xml                          |  84 +++
 .../has/client/AbstractHasClientPlugin.java     |  44 ++
 .../hadoop/has/client/HasAdminClient.java       | 480 +++++++++++++
 .../hadoop/has/client/HasAuthAdminClient.java   | 553 +++++++++++++++
 .../org/apache/hadoop/has/client/HasClient.java | 677 ++++++++++++++++++
 .../hadoop/has/client/HasClientPlugin.java      |  42 ++
 .../has/client/HasClientPluginRegistry.java     |  63 ++
 .../hadoop/has/client/HasLoginException.java    |  37 +
 .../hadoop/has/client/HasLoginModule.java       | 491 +++++++++++++
 .../src/main/resources/ssl-client.conf.template |  20 +
 has/has-common/pom.xml                          |  67 ++
 .../org/apache/hadoop/has/common/HasAdmin.java  | 140 ++++
 .../org/apache/hadoop/has/common/HasConfig.java | 103 +++
 .../apache/hadoop/has/common/HasConfigKey.java  |  61 ++
 .../apache/hadoop/has/common/HasException.java  |  53 ++
 .../hadoop/has/common/spnego/AuthToken.java     | 217 ++++++
 .../has/common/spnego/AuthenticatedURL.java     | 282 ++++++++
 .../common/spnego/AuthenticationException.java  |  54 ++
 .../hadoop/has/common/spnego/Authenticator.java |  52 ++
 .../common/spnego/KerberosAuthenticator.java    | 359 ++++++++++
 .../common/spnego/KerberosHasAuthenticator.java |  25 +
 .../hadoop/has/common/spnego/KerberosUtil.java  | 262 +++++++
 .../hadoop/has/common/ssl/KeyStoresFactory.java | 254 +++++++
 .../common/ssl/ReloadingX509TrustManager.java   | 208 ++++++
 .../hadoop/has/common/ssl/SSLFactory.java       | 290 ++++++++
 .../has/common/ssl/SSLHostnameVerifier.java     | 615 ++++++++++++++++
 .../has/common/util/ConnectionConfigurator.java |  39 ++
 .../has/common/util/HasJaasLoginUtil.java       | 261 +++++++
 .../apache/hadoop/has/common/util/HasUtil.java  |  93 +++
 .../hadoop/has/common/util/PlatformName.java    |  59 ++
 .../hadoop/has/common/util/StringUtils.java     |  55 ++
 .../has/common/util/URLConnectionFactory.java   | 215 ++++++
 has/has-dist/assembly.xml                       |  56 ++
 has/has-dist/bin/hadmin-local.sh                |  54 ++
 has/has-dist/bin/hadmin-remote.sh               |  56 ++
 has/has-dist/bin/kdcinit.sh                     |  56 ++
 has/has-dist/bin/kinit.sh                       |  38 +
 has/has-dist/bin/klist.sh                       |  37 +
 has/has-dist/bin/login-test.sh                  |  34 +
 has/has-dist/bin/quick-start.sh                 |  68 ++
 has/has-dist/bin/start-has.sh                   | 115 +++
 has/has-dist/bin/stop-has.sh                    |  75 ++
 has/has-dist/conf/backend.conf                  |  23 +
 has/has-dist/conf/hadmin.conf                   |   6 +
 has/has-dist/conf/has-env.sh                    |  29 +
 has/has-dist/conf/has-server.conf               |  27 +
 has/has-dist/conf/kdc.conf                      |  23 +
 has/has-dist/conf/krb5.conf                     |  29 +
 has/has-dist/log4j.properties                   |  27 +
 has/has-dist/pom.xml                            |  91 +++
 has/has-dist/webapps/WEB-INF/web.xml            |  17 +
 has/has-dist/webapps/has/index.html             |  24 +
 has/has-plugins/pom.xml                         |  48 ++
 ...org.apache.hadoop.has.client.HasClientPlugin |  16 +
 ...org.apache.hadoop.has.server.HasServerPlugin |  16 +
 .../plugins/TestHasClientPluginRegistry.java    |  44 ++
 .../plugins/TestHasServerPluginRegistry.java    |  43 ++
 has/has-server/pom.xml                          | 118 ++++
 .../has/server/AbstractHasServerPlugin.java     |  45 ++
 .../hadoop/has/server/HasAuthenException.java   |  37 +
 .../org/apache/hadoop/has/server/HasServer.java | 701 +++++++++++++++++++
 .../hadoop/has/server/HasServerPlugin.java      |  39 ++
 .../has/server/HasServerPluginRegistry.java     |  63 ++
 .../hadoop/has/server/admin/LocalHasAdmin.java  | 382 ++++++++++
 .../hadoop/has/server/kdc/HasKdcHandler.java    | 315 +++++++++
 .../hadoop/has/server/kdc/MySQLConfKey.java     |  52 ++
 .../has/server/kdc/MySQLIdentityBackend.java    | 426 +++++++++++
 .../hadoop/has/server/web/ConfFilter.java       |  54 ++
 .../hadoop/has/server/web/HostRoleType.java     |  55 ++
 .../hadoop/has/server/web/WebConfigKey.java     |  62 ++
 .../apache/hadoop/has/server/web/WebServer.java | 348 +++++++++
 .../hadoop/has/server/web/rest/ConfApi.java     | 196 ++++++
 .../hadoop/has/server/web/rest/HadminApi.java   | 455 ++++++++++++
 .../hadoop/has/server/web/rest/HasApi.java      | 336 +++++++++
 .../server/web/rest/param/AuthTokenParam.java   |  45 ++
 .../has/server/web/rest/param/EnumParam.java    |  51 ++
 .../has/server/web/rest/param/HostParam.java    |  45 ++
 .../server/web/rest/param/HostRoleParam.java    |  45 ++
 .../hadoop/has/server/web/rest/param/Param.java | 123 ++++
 .../server/web/rest/param/PasswordParam.java    |  45 ++
 .../server/web/rest/param/PrincipalParam.java   |  45 ++
 .../has/server/web/rest/param/StringParam.java  |  68 ++
 .../has/server/web/rest/param/TypeParam.java    |  48 ++
 .../src/main/resources/backend.conf.template    |  21 +
 .../src/main/resources/kdc.conf.template        |  23 +
 .../src/main/resources/krb5.conf.template       |  29 +
 .../hadoop/has/server/TestHasWebServer.java     | 128 ++++
 .../hadoop/has/server/TestRestApiBase.java      | 336 +++++++++
 .../org/apache/hadoop/has/server/TestUtil.java  | 368 ++++++++++
 .../hadoop/has/server/json/TestJsonConfApi.java |  83 +++
 .../has/server/json/TestJsonHadminApi.java      |  80 +++
 .../hadoop/has/server/json/TestJsonHasApi.java  |  54 ++
 .../has/server/mysql/TestMySQLConfApi.java      |  70 ++
 .../has/server/mysql/TestMySQLHadminApi.java    |  64 ++
 .../has/server/mysql/TestMySQLHasApi.java       |  46 ++
 .../src/test/resources/conf/backend.conf        |  20 +
 .../src/test/resources/conf/has-server.conf     |  25 +
 has/has-server/src/test/resources/conf/kdc.conf |  23 +
 .../src/test/resources/conf/krb5.conf           |  29 +
 .../src/test/resources/webapps/WEB-INF/web.xml  |  17 +
 .../src/test/resources/webapps/has/index.html   |  24 +
 has/has-tool/has-client-tool/pom.xml            |  33 +
 .../client/hadmin/remote/HadminRemoteTool.java  | 164 +++++
 .../hadmin/remote/cmd/HadminRemoteCmd.java      |  49 ++
 .../remote/cmd/HasRemoteAddPrincipalCmd.java    |  70 ++
 .../cmd/HasRemoteCreatePrincipalsCmd.java       |  82 +++
 .../remote/cmd/HasRemoteDeletePrincipalCmd.java |  89 +++
 .../remote/cmd/HasRemoteDisableConfCmd.java     |  49 ++
 .../remote/cmd/HasRemoteEnableConfCmd.java      |  49 ++
 .../remote/cmd/HasRemoteExportKeytabsCmd.java   |  58 ++
 .../remote/cmd/HasRemoteGetHostRolesCmd.java    |  68 ++
 .../remote/cmd/HasRemoteGetPrincipalsCmd.java   |  76 ++
 .../remote/cmd/HasRemoteRenamePrincipalCmd.java |  91 +++
 .../tool/client/hclient/HasClientLoginTool.java | 269 +++++++
 .../has/tool/client/kdcinit/HasInitTool.java    | 132 ++++
 .../kdcinit/cmd/HasConfKdcBackendCmd.java       |  66 ++
 .../tool/client/kdcinit/cmd/HasConfKdcCmd.java  |  54 ++
 .../client/kdcinit/cmd/HasGetHasconfCmd.java    |  77 ++
 .../client/kdcinit/cmd/HasGetKrb5confCmd.java   |  77 ++
 .../tool/client/kdcinit/cmd/HasInitKdcCmd.java  |  94 +++
 .../client/kdcinit/cmd/HasSetPluginCmd.java     |  53 ++
 .../tool/client/kdcinit/cmd/HasStartKdcCmd.java |  49 ++
 .../has/tool/client/kdcinit/cmd/KdcInitCmd.java |  42 ++
 .../has/tool/client/kinit/KinitOption.java      |  88 +++
 .../hadoop/has/tool/client/kinit/KinitTool.java | 384 ++++++++++
 .../has/tool/client/klist/KlistOption.java      |  66 ++
 .../hadoop/has/tool/client/klist/KlistTool.java | 293 ++++++++
 has/has-tool/has-server-tool/pom.xml            |  38 +
 .../server/hadmin/local/HadminLocalTool.java    | 265 +++++++
 .../hadmin/local/cmd/AddPrincipalCmd.java       |  61 ++
 .../hadmin/local/cmd/AddPrincipalsCmd.java      |  78 +++
 .../hadmin/local/cmd/DeletePrincipalCmd.java    |  80 +++
 .../hadmin/local/cmd/DisableConfigureCmd.java   |  40 ++
 .../hadmin/local/cmd/EnableConfigureCmd.java    |  40 ++
 .../hadmin/local/cmd/ExportKeytabsCmd.java      |  57 ++
 .../hadmin/local/cmd/GetHostRolesCmd.java       |  36 +
 .../hadmin/local/cmd/GetPrincipalCmd.java       |  76 ++
 .../tool/server/hadmin/local/cmd/HadminCmd.java |  42 ++
 .../server/hadmin/local/cmd/KeytabAddCmd.java   |  91 +++
 .../hadmin/local/cmd/ListPrincipalsCmd.java     |  63 ++
 .../hadmin/local/cmd/RenamePrincipalCmd.java    |  82 +++
 has/has-tool/pom.xml                            |  23 +
 has/pom.xml                                     | 128 ++++
 has/supports/hadoop/README.md                   | 339 +++++++++
 has/supports/hadoop/hadoop-2.7.2.patch          | 152 ++++
 has/supports/hbase/README.md                    | 154 ++++
 .../hbase/hbase-1.1.10-hadoop-2.5.1.patch       | 136 ++++
 has/supports/hive/README.md                     |  55 ++
 has/supports/oozie/README.md                    | 105 +++
 has/supports/phoenix/README.md                  |  30 +
 has/supports/presto/README.md                   |  24 +
 has/supports/spark/README.md                    |  26 +
 has/supports/spark/spark-v2.1.1.patch           |  51 ++
 has/supports/thrift/README.md                   |  70 ++
 has/supports/zookeeper/README.md                |  59 ++
 has/supports/zookeeper/conf/jaas.conf           |  13 +
 has/supports/zookeeper/conf/java.env            |   1 +
 has/supports/zookeeper/pom.xml                  |  47 ++
 172 files changed, 19408 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/LICENSE
----------------------------------------------------------------------
diff --git a/has/LICENSE b/has/LICENSE
new file mode 100644
index 0000000..8dada3e
--- /dev/null
+++ b/has/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "{}"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright {yyyy} {name of copyright owner}
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/README.md
----------------------------------------------------------------------
diff --git a/has/README.md b/has/README.md
new file mode 100644
index 0000000..f06b393
--- /dev/null
+++ b/has/README.md
@@ -0,0 +1,93 @@
+# Hadoop Authentication Service (HAS)
+A dedicated Hadoop Authentication Server to support various authentication mechanisms other than just Kerberos. At its core it leverages a Kerby KDC developed by [Apache Kerby](https://github.com/apache/directory-kerby), a sub project of [Apache Directory](http://directory.apache.org).
+
+# High level considerations
+* Hadoop services are still strongly authenticated by Kerberos, as Kerberos is so far the only means to enable Hadoop security.
+* Hadoop users can continue to use their familiar login methods.
+* Security admins won't have to migrate and sync up their user accounts to Kerberos back and forth.
+* New authentication mechanisms can be customized and plugged in.
+
+# Architecture
+![](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/has-overall.png)
+
+# Design
+Assuming existing users are stored in a SQL database (like MySQL), the detailed design and workflow may go like the following:
+![](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/has-design.png)
+
+
+# New mechanism plugin API
+
+## HAS client plugin HasClientPlugin:
+
+```Java
+// Get the login module type ID, used to distinguish this module from others. 
+// Should correspond to the server side module.
+String getLoginType()
+
+// Perform all the client side login logics, the results wrapped in an AuthToken, 
+// will be validated by HAS server.
+AuthToken login(Conf loginConf) throws HasLoginException
+```
+
+## HAS server plugin HasServerPlugin:
+
+```Java
+// Get the login module type ID, used to distinguish this module from others. 
+// Should correspond to the client side module.
+String getLoginType()
+
+// Perform all the server side authentication logics, the results wrapped in an AuthToken, 
+// will be used to exchange a Kerberos ticket.
+AuthToken authenticate(AuthToken userToken) throws HasAuthenException
+```
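+
+A minimal sketch of a client plugin built against this API; the interface and method signatures come from the snippets above, while the imports, the AuthToken constructor, and the "MyToken" type used here are illustrative assumptions:
+```Java
+import org.apache.hadoop.has.client.HasClientPlugin;
+import org.apache.hadoop.has.client.HasLoginException;
+import org.apache.hadoop.has.common.spnego.AuthToken;
+import org.apache.kerby.config.Conf;
+
+public class MyTokenClientPlugin implements HasClientPlugin {
+    @Override
+    public String getLoginType() {
+        return "MyToken";   // must match the server-side plugin's type ID
+    }
+
+    @Override
+    public AuthToken login(Conf loginConf) throws HasLoginException {
+        // Assumed (userName, principal, type) constructor; adapt to the real API.
+        return new AuthToken("alice", "alice@HADOOP.COM", getLoginType());
+    }
+}
+```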
+
+## REST API
+Please look at [REST API](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/rest-api.md) for details.
+
+## How to start
+Please look at [How to start](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/has-start.md) for details.
+
+## High Availability
+Please look at [High Availability](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/has-ha.md) for details.
+
+## Cross Realm
+Please look at [How to setup cross-realm](https://github.com/intel-bigdata/has/blob/cross-realm/doc/cross-realm.md) for details.
+
+## Enable Hadoop ecosystem components
+
+* [Enable Hadoop](https://github.com/Intel-bigdata/HAS/blob/release-1.0.0/supports/hadoop/README.md)
+
+* [Enable Zookeeper](https://github.com/Intel-bigdata/HAS/blob/release-1.0.0/supports/zookeeper/README.md)
+
+* [Enable HBase](https://github.com/Intel-bigdata/HAS/blob/release-1.0.0/supports/hbase/README.md)
+
+* [Enable Hive](https://github.com/Intel-bigdata/HAS/blob/release-1.0.0/supports/hive/README.md)
+
+* [Enable Phoenix](https://github.com/Intel-bigdata/HAS/blob/release-1.0.0/supports/phoenix/README.md)
+
+* [Enable Thrift](https://github.com/Intel-bigdata/HAS/blob/release-1.0.0/supports/thrift/README.md)
+
+* [Enable Spark](https://github.com/Intel-bigdata/HAS/blob/release-1.0.0/supports/spark/README.md)
+
+* [Enable Oozie](https://github.com/Intel-bigdata/HAS/blob/release-1.0.0/supports/oozie/README.md)
+
+* [Enable Presto](https://github.com/Intel-bigdata/HAS/blob/release-1.0.0/supports/presto/README.md)
+
+## List of supported Hadoop ecosystem components
+
+|   Big Data Components   |           Supported         |   Rebuild Required   |   Configuring Required   |
+|:-----------------------:|:---------------------------:|:--------------------:|:------------------------:|
+| Hadoop                  | Yes                         | Yes                  | Yes                      |
+| Zookeeper               | Yes                         | Yes                  | Yes                      |
+| HBase                   | Yes                         | Yes                  | Yes                      |
+| Hive                    | Yes                         | No                   | Yes                      |
+| Phoenix                 | Yes                         | No                   | Yes                      |
+| Thrift                  | Yes                         | No                   | Yes                      |
+| Spark                   | Yes                         | No                   | Yes                      |
+| Oozie                   | Yes                         | No                   | Yes                      |
+| Presto                  | Yes (0.148 and later)       | No                   | Yes                      |
+| Pig                     | Yes                         | No                   | No                       |
+| Sqoop                   | Yes                         | No                   | No                       |
+
+## Performance test report
+Please look at [Performance test report](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/performance-report.md) for details.

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/build-tools/has-checkstyle.xml
----------------------------------------------------------------------
diff --git a/has/build-tools/has-checkstyle.xml b/has/build-tools/has-checkstyle.xml
new file mode 100644
index 0000000..6ab54b0
--- /dev/null
+++ b/has/build-tools/has-checkstyle.xml
@@ -0,0 +1,150 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+  
+  http://www.apache.org/licenses/LICENSE-2.0
+  
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+<!DOCTYPE module PUBLIC
+    "-//Puppy Crawl//DTD Check Configuration 1.2//EN"
+    "http://www.puppycrawl.com/dtds/configuration_1_2.dtd">
+
+<module name="Checker">
+
+    <!-- Checks that property files contain the same keys.         -->
+    <!-- See http://checkstyle.sf.net/config_misc.html#Translation -->
+    <module name="Translation"/>
+
+    <module name="FileLength"/>
+
+    <!-- Following interprets the header file as regular expressions. -->
+    <!-- <module name="RegexpHeader"/>                                -->
+
+    <module name="FileTabCharacter">
+        <property name="eachLine" value="true"/>
+    </module>
+
+    <module name="RegexpSingleline">
+        <!-- \s matches whitespace character, $ matches end of line. -->
+        <property name="format" value="^[^\s][^\*].*\s+$"/>
+        <property name="message" value="Line has trailing spaces."/>
+    </module>
+
+    <module name="TreeWalker">
+
+        <!-- required for SuppressWarningsFilter (and other Suppress* rules not used here) -->
+        <!-- see http://checkstyle.sourceforge.net/config_annotation.html#SuppressWarningsHolder -->
+        <module name="SuppressWarningsHolder"/>
+
+
+        <!-- Checks for Naming Conventions.                  -->
+        <!-- See http://checkstyle.sf.net/config_naming.html -->
+        <module name="ConstantName"/>
+        <module name="LocalFinalVariableName"/>
+        <module name="LocalVariableName"/>
+        <module name="MemberName"/>
+        <module name="MethodName"/>
+        <module name="PackageName"/>
+        <module name="ParameterName"/>
+        <module name="StaticVariableName"/>
+        <module name="TypeName"/>
+
+
+        <!-- Checks for imports                              -->
+        <!-- See http://checkstyle.sf.net/config_import.html -->
+        <!-- module name="AvoidStarImport"/ -->
+        <!--module name="IllegalImport"/--> <!-- defaults to sun.* packages -->
+        <module name="RedundantImport"/>
+        <module name="UnusedImports"/>
+
+
+        <!-- Checks for Size Violations.                    -->
+        <!-- See http://checkstyle.sf.net/config_sizes.html -->
+        <module name="LineLength">
+            <property name="max" value="120"/>
+            <property name="ignorePattern" value="^ *\* *"/>
+        </module>
+        <module name="MethodLength">
+            <property name="max" value="200"/>
+        </module>
+        <module name="ParameterNumber">
+            <property name="max" value="8"/>
+        </module>
+
+
+        <!-- Checks for whitespace                               -->
+        <!-- See http://checkstyle.sf.net/config_whitespace.html -->
+        <module name="EmptyForIteratorPad"/>
+        <module name="MethodParamPad"/>
+        <module name="NoWhitespaceAfter"/>
+        <module name="NoWhitespaceBefore"/>
+        <module name="OperatorWrap"/>
+        <module name="ParenPad"/>
+        <module name="TypecastParenPad"/>
+        <module name="WhitespaceAfter"/>
+        <module name="WhitespaceAround"/>
+
+
+        <!-- Modifier Checks                                    -->
+        <!-- See http://checkstyle.sf.net/config_modifiers.html -->
+        <module name="ModifierOrder"/>
+        <module name="RedundantModifier"/>
+
+
+        <!-- Checks for blocks. You know, those {}'s         -->
+        <!-- See http://checkstyle.sf.net/config_blocks.html -->
+        <module name="AvoidNestedBlocks"/>
+        <module name="EmptyBlock"/>
+        <module name="LeftCurly"/>
+        <module name="NeedBraces"/>
+        <module name="RightCurly"/>
+
+
+        <!-- Checks for common coding problems               -->
+        <!-- See http://checkstyle.sf.net/config_coding.html -->
+        <!-- module name="AvoidInlineConditionals"/ -->
+        <module name="EmptyStatement"/>
+        <module name="EqualsHashCode"/>
+        <module name="IllegalInstantiation"/>
+
+
+        <!-- Checks for class design                         -->
+        <!-- See http://checkstyle.sf.net/config_design.html -->
+        <module name="FinalClass"/>
+        <module name="InterfaceIsType"/>
+
+
+        <!-- Miscellaneous other checks.                   -->
+        <!-- See http://checkstyle.sf.net/config_misc.html -->
+        <module name="ArrayTypeStyle"/>
+        <module name="UpperEll"/>
+
+        <module name="FileContentsHolder"/>
+
+    </module>
+
+    <!-- Support @SuppressWarnings (added in Checkstyle 5.7) -->
+    <!-- see http://checkstyle.sourceforge.net/config.html#SuppressWarningsFilter -->
+    <module name="SuppressWarningsFilter"/>
+
+    <!-- Checks properties file for a duplicated properties. -->
+    <!-- See http://checkstyle.sourceforge.net/config_misc.html#UniqueProperties -->
+    <module name="UniqueProperties"/>
+
+    <module name="SuppressionCommentFilter"/>
+
+</module>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/build-tools/has-pmd-ruleset.xml
----------------------------------------------------------------------
diff --git a/has/build-tools/has-pmd-ruleset.xml b/has/build-tools/has-pmd-ruleset.xml
new file mode 100644
index 0000000..772e0e3
--- /dev/null
+++ b/has/build-tools/has-pmd-ruleset.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+<ruleset name="kerby-pmd" xmlns="http://pmd.sourceforge.net/ruleset/2.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://pmd.sourceforge.net/ruleset/2.0.0 http://pmd.sourceforge.net/ruleset_2_0_0.xsd">
+  <description>
+  A PMD ruleset for Apache Kerby
+  </description>
+
+  <rule ref="rulesets/java/basic.xml">
+     <exclude name="AvoidUsingHardCodedIP" />
+  </rule>
+  <rule ref="rulesets/java/unusedcode.xml"/>
+  <rule ref="rulesets/java/imports.xml"/>
+  <rule ref="rulesets/java/braces.xml"/>
+  <rule ref="rulesets/java/empty.xml"/>
+  <rule ref="rulesets/java/migrating.xml">
+     <exclude name="JUnit4TestShouldUseAfterAnnotation" /> 
+  </rule>
+  <rule ref="rulesets/java/unnecessary.xml"/>
+
+</ruleset>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/doc/cross-realm.md
----------------------------------------------------------------------
diff --git a/has/doc/cross-realm.md b/has/doc/cross-realm.md
new file mode 100644
index 0000000..8c1fb36
--- /dev/null
+++ b/has/doc/cross-realm.md
@@ -0,0 +1,73 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+Establish cross realm trust
+============
+
+### Synchronize time of realms
+The clocks of the realms must be synchronized.
+
+### Add the same special principals in both realms, using a very strong password
+```
+cd kerby-dist/kdc-dist
+sh bin/kadmin.sh [server-conf-dir] -k [keytab]
+// Allow clients in the A.EXAMPLE.COM realm to access services in the B.EXAMPLE.COM realm
+HadminLocalTool.local: addprinc -pw [same-password] krbtgt/B.EXAMPLE.COM@A.EXAMPLE.COM
+// Make sure that both principals have matching key version numbers and encryption types
+HadminLocalTool.local: getprinc krbtgt/B.EXAMPLE.COM@A.EXAMPLE.COM
+```
+
+### Configure krb5.conf of realms
+
+* Configure the realms and domain_realm sections; make sure both realms are included.
+
+* Configure the capaths section, which contains the realm chain.
+
+An example of krb5.conf:
+```
+[realms]
+  A.EXAMPLE.COM = {
+    kdc = A.EXAMPLE.COM
+  }
+  B.EXAMPLE.COM = {
+    kdc = B.EXAMPLE.COM
+  }
+
+[domain_realm]
+  .A.EXAMPLE.COM = a.example.com
+  A.EXAMPLE.COM = a.example.com
+  .B.EXAMPLE.COM = b.example.com
+  B.EXAMPLE.COM = b.example.com
+
+[capaths]
+  A.EXAMPLE.COM = {
+    B.EXAMPLE.COM = .
+  }
+  B.EXAMPLE.COM = {
+    A.EXAMPLE.COM = .
+  }
+```
+
+> Make sure the FQDN matches the realm name, e.g. if the FQDN is localhost.hadoop.com, the realm should be HADOOP.COM.
+
+### Validate
+```
+cd kerby-dist/tool-dist
+sh bin/kinit.sh -conf [client-conf-dir] -c [credential-cache-of-local-realm] -S [principal-name-of-remote-realm]
+```

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/doc/deploy-https.md
----------------------------------------------------------------------
diff --git a/has/doc/deploy-https.md b/has/doc/deploy-https.md
new file mode 100644
index 0000000..d2ed6a0
--- /dev/null
+++ b/has/doc/deploy-https.md
@@ -0,0 +1,152 @@
+Deploy HTTPS
+===============
+
+## 1. Create a keystore file for each host
+
+> keystore: the keystore file that stores the certificate.      
+> validity: the validity period of the certificate in days.
+```
+keytool -alias {hostname} -keystore {keystore} -validity {validity} -genkey
+```
+
+> The keytool will ask for more details such as the keystore password, key password and CN (hostname).
+
+## 2. Export the certificate public key to a certificate file for each host
+```
+keytool -export -alias {hostname} -keystore {keystore} -rfc -file {cert-file}
+```
+
+## 3. Create a common truststore file (trustAll)
+The truststore file contains the public keys of all certificates. Assuming a 2-node cluster with node1 and node2,
+log in to node1 and import node1's certificate into the truststore file.
+```
+keytool -import -alias {hostname} -keystore {trustAll} -file {cert-file}
+```
+
+## 4. Update the common truststore file
+* Move {trustAll} from node1 to node2 ({trustAll} already has the certificate entry of node1), and repeat Step 3.
+
+* Move the updated {trustAll} from node2 to node1. Repeat these steps for each node in the cluster.
+When you finish, the {trustAll} file will have the certificates from all nodes.
+
+> Note: these steps can all be performed on a single node; just take care to use the correct hostname each time.
+
+## 5. Copy {trustAll} from node1 to all of the other nodes
+
+## 6. Validate the common truststore file
+```
+keytool -list -v -keystore {trustAll}
+```
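+
+The same check can be done programmatically. Below is a minimal Java sketch that lists the aliases in the truststore; the path and password are the ones chosen when {trustAll} was created:
+
+```Java
+import java.io.FileInputStream;
+import java.security.KeyStore;
+import java.util.Enumeration;
+
+public class ListTrustStore {
+    public static void main(String[] args) throws Exception {
+        // args[0] = path to trustAll, args[1] = truststore password
+        KeyStore ks = KeyStore.getInstance("JKS");
+        try (FileInputStream in = new FileInputStream(args[0])) {
+            ks.load(in, args[1].toCharArray());
+        }
+        // Every node's certificate alias should show up here.
+        Enumeration<String> aliases = ks.aliases();
+        while (aliases.hasMoreElements()) {
+            System.out.println(aliases.nextElement());
+        }
+    }
+}
+```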
+
+## 7. Edit the Configuration files
+> Deploy the {keystore} and {trustAll} files, and configure /etc/has/ssl-server.conf for the HAS server
+```
+ssl.server.keystore.location = {path to keystore}
+ssl.server.keystore.password = {keystore password set in step 1}
+ssl.server.keystore.keypassword = {keypassword set in step 1}
+ssl.server.truststore.reload.interval = 1000
+ssl.server.truststore.location = {path to trustAll}
+ssl.server.truststore.password = {trustAll password set in step 3}
+```
+
+> Configure /etc/has/ssl-client.conf for the HAS client
+```
+ssl.client.truststore.location = {path to trustAll}
+ssl.client.truststore.password = {trustAll password}
+```
+
+> Configure $HADOOP_HOME/etc/hadoop/ssl-server.xml for Hadoop
+```
+<configuration>
+
+<property>
+  <name>ssl.server.truststore.location</name>
+  <value>path to trustAll</value>
+</property>
+
+<property>
+  <name>ssl.server.truststore.password</name>
+  <value>trustAll password</value>
+</property>
+
+<property>
+  <name>ssl.server.truststore.type</name>
+  <value>jks</value>
+</property>
+
+<property>
+  <name>ssl.server.truststore.reload.interval</name>
+  <value>10000</value>
+</property>
+
+<property>
+  <name>ssl.server.keystore.location</name>
+  <value>path to keystore</value>
+</property>
+
+<property>
+  <name>ssl.server.keystore.password</name>
+  <value>keystore password</value>
+</property>
+
+<property>
+  <name>ssl.server.keystore.keypassword</name>
+  <value>keystore keypassword</value>
+</property>
+
+<property>
+  <name>ssl.server.keystore.type</name>
+  <value>jks</value>
+</property>
+
+</configuration>
+```
+
+> Configure $HADOOP_HOME/etc/hadoop/ssl-client.xml for Hadoop
+```
+<configuration>
+
+<property>
+  <name>ssl.client.truststore.location</name>
+  <value>path to trustAll</value>
+</property>
+
+<property>
+  <name>ssl.client.truststore.password</name>
+  <value>trustAll password</value>
+</property>
+
+<property>
+  <name>ssl.client.truststore.type</name>
+  <value>jks</value>
+</property>
+
+<property>
+  <name>ssl.client.truststore.reload.interval</name>
+  <value>10000</value>
+</property>
+
+<property>
+  <name>ssl.client.keystore.location</name>
+  <value>path to keystore</value>
+</property>
+
+<property>
+  <name>ssl.client.keystore.password</name>
+  <value>keystore password</value>
+</property>
+
+<property>
+  <name>ssl.client.keystore.keypassword</name>
+  <value>keystore keypassword</value>
+</property>
+
+<property>
+  <name>ssl.client.keystore.type</name>
+  <value>jks</value>
+</property>
+
+</configuration>
+```
+
+> To make the nodes in the cluster communicate bidirectionally, deploy all of the configuration files above on every node.

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/doc/deploy-spnego.md
----------------------------------------------------------------------
diff --git a/has/doc/deploy-spnego.md b/has/doc/deploy-spnego.md
new file mode 100644
index 0000000..cb2339b
--- /dev/null
+++ b/has/doc/deploy-spnego.md
@@ -0,0 +1,13 @@
+Deploy SPNEGO
+================
+
+## 1. Server Side Configuration (in the server-side has-server.conf)
+
+To use Kerberos SPNEGO as the authentication mechanism, the authentication filter must be configured with the following init parameters:
+- filter_auth_type: the keyword kerberos. For example: filter_auth_type = kerberos
+
+## 2. Client Side Configuration (in the client-side hadmin.conf)
+
+- filter_auth_type: the keyword kerberos. For example: filter_auth_type = kerberos
+- admin_keytab: the path to the keytab file containing the credential of the admin principal. For example: admin_keytab = /etc/has/admin.keytab. The admin.keytab here is the same file as on the server side.
+- admin_keytab_principal: the admin principal. For example: admin_keytab_principal = kadmin/<YOUR-REALM.COM>@<YOUR-REALM.COM>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/doc/has-design.png
----------------------------------------------------------------------
diff --git a/has/doc/has-design.png b/has/doc/has-design.png
new file mode 100644
index 0000000..575c503
Binary files /dev/null and b/has/doc/has-design.png differ

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/doc/has-ha.md
----------------------------------------------------------------------
diff --git a/has/doc/has-ha.md b/has/doc/has-ha.md
new file mode 100644
index 0000000..b683878
--- /dev/null
+++ b/has/doc/has-ha.md
@@ -0,0 +1,124 @@
+High Availability Using MySQL Backend
+========================================
+
+The HAS High Availability feature is implemented by running two redundant HAS servers.
+
+## Deployment
+
+### 1. Configure has-server.conf
+
+The two redundant HAS servers must use the same https port. Below are examples:
+
+* has-server.conf of HAS server on emr-header-1:
+```
+[HAS]
+  https_host = emr-header-1
+  https_port = 8092
+  filter_auth_type = kerberos
+  enable_conf = true
+
+[PLUGIN]
+  auth_type = RAM
+```
+
+* has-server.conf of HAS server on emr-worker-1:
+```
+[HAS]
+  https_host = emr-worker-1
+  https_port = 8092
+  filter_auth_type = kerberos
+  enable_conf = true
+
+[PLUGIN]
+  auth_type = RAM
+```
+
+### 2. Start HAS servers
+
+### 3. Configure HAS backend
+
+The two redundant HAS servers must use the **mysql** backend and share the same *mysql_url*, *mysql_user* and *mysql_password*.
+
+Please look at [How to use mysql backend](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/mysql-backend.md) for the MySQL backend configuration.
+
+### 4. Configure HAS KDC
+
+The two redundant HAS servers must have the same ports and realm.
+
+### 5. Start and init HAS KDC servers
+
+> After running init on either HAS server, the other one is initialized as well.
+>
+> Please keep the shared admin.keytab safe.
+
+### 6. Re-export has-client.conf for HAS web server HA
+
+```
+cd HAS/has-dist
+// Start KDC init tool
+sh bin/kdcinit.sh <conf_dir>
+// Get has-client.conf, and put it to /etc/has:
+KdcInitTool: gethas -p /etc/has
+KdcInitTool: exit
+```
+
+You will get has-client.conf like the following:
+```
+[HAS]
+  https_host = emr-header-1,emr-worker-1
+  https_port = 8092
+  filter_auth_type = kerberos
+  enable_conf = true
+
+[PLUGIN]
+  auth_type = RAM
+```
+
+Hadoop users can also enable the HAS HA feature by updating **core-site.xml** instead of re-exporting has-client.conf;
+add the following property:
+```
+<property>
+   <name>hadoop.security.has</name>
+   <value>https://emr-header-1:8092/has/v1?auth_type=RAM;https://emr-worker-1:8092/has/v1?auth_type=RAM</value>
+</property>
+```
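+
+The property value is a semicolon-separated list of the redundant HAS server URLs. How the client iterates over them is internal to HAS; the idea, though, can be sketched roughly as follows (illustrative only — pickServer and isReachable are invented helpers, not HAS APIs):
+
+```Java
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+
+public class HasServerSelector {
+    // Try each semicolon-separated HAS server URL in order and return the
+    // first one that answers; the real client may use a different policy.
+    public static String pickServer(String hasProperty) throws IOException {
+        for (String url : hasProperty.split(";")) {
+            if (isReachable(url.trim())) {
+                return url.trim();
+            }
+        }
+        throw new IOException("No HAS server reachable in: " + hasProperty);
+    }
+
+    private static boolean isReachable(String url) {
+        try {
+            HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
+            conn.setConnectTimeout(3000);
+            conn.connect();
+            conn.disconnect();
+            return true;
+        } catch (Exception e) {
+            return false;
+        }
+    }
+}
+```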
+
+### 7. Re-export krb5.conf for HAS KDC HA
+
+```
+cd HAS/has-dist
+// Start KDC init tool:
+sh bin/kdcinit.sh <conf_dir>
+// Get krb5.conf, and put it to /etc:
+KdcInitTool: getkrb5 -p /etc
+KdcInitTool: exit
+```
+
+You will get krb5.conf like the following:
+```
+[libdefaults]
+    kdc_realm = HADOOP.COM
+    default_realm = HADOOP.COM
+    udp_preference_limit = 4096
+    kdc_tcp_port = 88
+    kdc_udp_port = 88
+
+[realms]
+    HADOOP.COM = {
+        kdc = localhost:88
+        kdc = localhost:88
+    }
+```
+
+## Verification
+
+You can use the login-test tool to verify:
+
+### 1. Update hadmin.conf in <conf_dir>
+
+### 2. Run login-test tool
+```
+cd HAS/has-dist
+// Use tgt to login
+sh bin/login-test.sh tgt <conf_dir>
+```

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/doc/has-overall.png
----------------------------------------------------------------------
diff --git a/has/doc/has-overall.png b/has/doc/has-overall.png
new file mode 100644
index 0000000..2df5e48
Binary files /dev/null and b/has/doc/has-overall.png differ

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/doc/has-start.md
----------------------------------------------------------------------
diff --git a/has/doc/has-start.md b/has/doc/has-start.md
new file mode 100644
index 0000000..43bc2eb
--- /dev/null
+++ b/has/doc/has-start.md
@@ -0,0 +1,183 @@
+How to start
+================
+
+## 1. Install
+
+### Download HAS source code:
+```
+git clone https://github.com/Intel-bigdata/HAS.git
+```
+
+### Install HAS:
+```
+cd HAS
+mvn clean install
+```
+
+## 2. Start and configure HAS server
+
+### Deploy https
+Please look at [How to deploy https](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/deploy-https.md) for details.
+
+### Configure has-server.conf in <conf_dir>:
+An example of has-server.conf:
+```
+[HAS]
+  https_host = localhost
+  https_port = 8092
+  filter_auth_type = kerberos
+  
+[PLUGIN]
+  auth_type = RAM
+```
+
+### Start HAS server:
+```
+cd HAS/has-dist
+sh bin/start-has.sh <conf_dir> <work_dir>
+```
+
+Alternatively:
+```
+export HAS_CONF_DIR=<conf_dir>
+export HAS_WORK_DIR=<work_dir>
+cd HAS/has-dist
+sh bin/start-has.sh
+```
+
+Root privileges are required if the https_port or KDC port number is in the range 0 to 1023.
+
+### Configure HAS plugin:
+```
+cd HAS/has-dist
+// Remove the https proxy if one is set
+unset https_proxy
+// Start KDC init tool
+sh bin/kdcinit.sh <conf_dir>
+// Also: sh bin/kdcinit.sh, if the HAS_CONF_DIR environment variable has been set.
+// Plugin_name example: RAM
+KdcInitTool: set_plugin <plugin_name>
+KdcInitTool: exit
+```
+
+### Configure HAS backend:
+```
+cd HAS/has-dist
+// Start KDC init tool
+sh bin/kdcinit.sh <conf_dir>
+// An example of json backend:
+KdcInitTool: config_kdcBackend json /tmp/has/jsonbackend
+// An example of mysql backend:
+KdcInitTool: config_kdcBackend mysql jdbc:mysql://127.0.0.1:3306/mysqlbackend root passwd
+KdcInitTool: exit
+```
+For the MySQL backend, please refer to [How to use mysql backend](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/mysql-backend.md) for details.
+
+### Configure HAS KDC:
+```
+cd HAS/has-dist
+// Start KDC init tool
+sh bin/kdcinit.sh <conf_dir>
+// An example of configuring the HAS KDC:
+KdcInitTool: config_kdc localhost 88 HADOOP.COM
+KdcInitTool: exit
+```
+Please make sure the following configuration files exist in the conf directory:
+has-server.conf, backend.conf and kdc.conf.
+
+### Start HAS KDC server:
+```
+cd HAS/has-dist
+// Start KDC init tool
+sh bin/kdcinit.sh <conf_dir>
+KdcInitTool: start
+KdcInitTool: exit
+```
+
+### Init HAS server:
+```
+cd HAS/has-dist
+// Start KDC init tool
+sh bin/kdcinit.sh <conf_dir>
+KdcInitTool: init
+KdcInitTool: exit
+```
+
+### Deploy HTTP SPNEGO
+Please look at [How to deploy http spnego](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/deploy-spnego.md) for details.
+Then restart the HAS server:
+
+```
+cd HAS/has-dist
+sh bin/stop-has.sh
+
+cd HAS/has-dist
+sh bin/start-has.sh <conf_dir> <work_dir>
+
+cd HAS/has-dist
+sh bin/kdcinit.sh <conf_dir>
+KdcInitTool: start
+KdcInitTool: exit
+```
+
+### Get krb5.conf:
+```
+cd HAS/has-dist
+// Start KDC init tool:
+sh bin/kdcinit.sh <conf_dir>
+// Get krb5.conf, and put it to /etc:
+KdcInitTool: getkrb5 -p /etc
+KdcInitTool: exit
+```
+
+### Get has-client.conf:
+```
+cd HAS/has-dist
+// Start KDC init tool
+sh bin/kdcinit.sh <conf_dir>
+// Get has-client.conf, and put it to /etc/has:
+KdcInitTool: gethas -p /etc/has
+KdcInitTool: exit
+```
+
+## 3. Prepare for Hadoop
+
+### Create service principals:
+```
+cd HAS/has-dist
+echo { \
+    HOSTS: [ \
+       {"name":"<host>","hostRoles":"<role>,..., <role>"\}, \
+       ...
+       {"name":"<host>","hostRoles":"<role>,...,<role>"\} \
+    ] \
+\} > hosts.txt
+// Start local hadmin tool
+sh bin/hadmin-local.sh <conf_dir> -k <keytab>
+// Also: sh bin/hadmin-local.sh -k <keytab>, if the HAS_CONF_DIR environment variable has been set.
+// Alternatively, use the remote hadmin tool; the admin.keytab file must be placed in /etc/has
+sh bin/hadmin-remote.sh <conf_dir>
+// Also: sh bin/hadmin-remote.sh, if the HAS_CONF_DIR environment variable has been set.
+HadminLocalTool.local: creprincs hosts.txt
+HadminLocalTool.local: exit
+```
+The admin.keytab file is created by the kdcinit tool. In both the local and remote hadmin tools, you can type "?" for help.
+
+### Get hostRoles list:
+```
+cd HAS/has-dist
+// Start local or remote hadmin tool
+sh bin/hadmin-local.sh(bin/hadmin-remote.sh) <conf_dir> -k <keytab>
+HadminLocalTool.local: hostroles
+HadminLocalTool.local: exit
+```
+
+### Export service keytabs:
+```
+cd HAS/has-dist
+// Start local or remote hadmin tool
+sh bin/hadmin-local.sh(bin/hadmin-remote.sh) <conf_dir> -k <keytab>
+// An example of exporting the keytabs of localhost (hostname):
+HadminLocalTool.local: expkeytabs localhost
+HadminLocalTool.local: exit
+```

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/doc/mysql-backend.md
----------------------------------------------------------------------
diff --git a/has/doc/mysql-backend.md b/has/doc/mysql-backend.md
new file mode 100644
index 0000000..4a06be3
--- /dev/null
+++ b/has/doc/mysql-backend.md
@@ -0,0 +1,23 @@
+MySQL Backend
+===============
+
+## Install MySQL
+
+Please refer to [install mysql](https://dev.mysql.com/doc/refman/5.7/en/linux-installation.html).
+
+## Config backend
+```
+// Arguments: the JDBC URL of the MySQL database (mysqlbackend is the name of the HAS backend database), then the MySQL user name and password
+cd HAS/has-dist
+sh bin/kdcinit.sh conf
+KdcInitTool: config_kdcBackend mysql jdbc:mysql://127.0.0.1:3306/mysqlbackend?createDatabaseIfNotExist=true root passwd
+KdcInitTool: exit
+```
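+
+Before pointing the KDC at the database, it may help to confirm that the JDBC URL and credentials actually work. A minimal Java sketch using the same values as the example above (assumes the MySQL JDBC driver is on the classpath):
+
+```Java
+import java.sql.Connection;
+import java.sql.DriverManager;
+
+public class CheckMysqlBackend {
+    public static void main(String[] args) throws Exception {
+        // Same URL, user and password as passed to config_kdcBackend above.
+        String url = "jdbc:mysql://127.0.0.1:3306/mysqlbackend?createDatabaseIfNotExist=true";
+        try (Connection conn = DriverManager.getConnection(url, "root", "passwd")) {
+            System.out.println("MySQL backend reachable: " + !conn.isClosed());
+        }
+    }
+}
+```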
+
+## Config kdc
+```
+cd HAS/has-dist
+sh bin/kdcinit.sh conf
+KdcInitTool: config_kdc localhost 88 HADOOP.COM
+KdcInitTool: exit
+```

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/doc/performance-report.md
----------------------------------------------------------------------
diff --git a/has/doc/performance-report.md b/has/doc/performance-report.md
new file mode 100644
index 0000000..e542782
--- /dev/null
+++ b/has/doc/performance-report.md
@@ -0,0 +1,98 @@
+# HAS Performance Test Report
+
+## 1. Overview
+
+HAS is a dedicated Hadoop authentication server to support various authentication mechanisms other than just Kerberos. With HAS, users can keep their familiar login methods, and new authentication mechanisms can be customized and plugged in.
+
+A Hadoop cluster can have thousands of nodes, so a large number of authentication requests may be sent to the HAS server at the same time. Stability under high concurrency is therefore critical for HAS.
+
+## 2. Test Environment
+
+The tests used Alibaba Cloud Elastic Compute Service; the detailed test environment is as follows:
+
+### 2.1 Hardware environment
+
+* HAS Server:
+
+> CPU: Intel(R) Xeon(R) CPU E5-2682 @ 2.50GHz    
+> MEM: 16GB    
+> Disk: 43GB 86GB    
+
+* HAS Client:
+
+> CPU: Intel(R) Xeon(R) CPU E5-2682 @ 2.50GHz    
+> MEM: 16GB    
+> Disk: 43GB 86GB * 3
+
+### 2.2 Software environment
+
+> OS: CentOS 7.2    
+> JAVA: 1.8    
+> HAS: 1.0.0    
+> MySQL: 5.5.52  
+
+## 3. Test Method
+
+Using the [login-test](https://github.com/Intel-bigdata/HAS/blob/master/has-dist/bin/login-test.sh) script, the test can be broadly divided into four steps:
+
+1. Add principals to HAS server
+2. Export keytab files to HAS Client  
+    
+    ```shell
+    cd HAS/has-dist         
+    sh bin/login-test add <conf_dir> <work_dir> <principal_num>
+    ```
+
+3. Use keytab files to login concurrently
+
+    ```shell                        
+    sh bin/login-test run <conf_dir> <work_dir> <concurrency_num>
+    ```
+
+4. Record the login result and the elapsed login time (a sketch of such a timing harness is shown below)
+
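+A minimal sketch of such a concurrent timing harness follows; doLogin is a stand-in for the real keytab login performed by the login-test tool:
+
+```Java
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+public class ConcurrentLoginBench {
+    // Stand-in for the real keytab login; replace with the actual client call.
+    static boolean doLogin(int i) {
+        return true;
+    }
+
+    public static void main(String[] args) throws Exception {
+        int concurrency = Integer.parseInt(args[0]);
+        ExecutorService pool = Executors.newFixedThreadPool(concurrency);
+        List<Future<Boolean>> results = new ArrayList<>();
+        long start = System.currentTimeMillis();
+        for (int i = 0; i < concurrency; i++) {
+            final int id = i;
+            results.add(pool.submit(() -> doLogin(id)));
+        }
+        for (Future<Boolean> f : results) {
+            f.get(); // wait for every login to finish
+        }
+        long total = System.currentTimeMillis() - start;
+        pool.shutdown();
+        System.out.println("Total time (ms): " + total);
+        System.out.println("Time per request (ms): " + (double) total / concurrency);
+    }
+}
+```
+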
+The testing process is illustrated below:
+
+![testing process](https://user-images.githubusercontent.com/9171954/27905170-b7637602-6271-11e7-8fc9-27d494f9b1ee.jpg)
+
+## 4. Test Result
+
+The test results consist of the total time and the time per request of logins using keytab files.
+
+### 4.1 Using Json Backend
+
+| Concurrency | 100 | 500 | 1000 | 5000 | 8000 | 10000 |
+| :---: | :---: | :---: | :---: | :---: | :---: | :---: |
+| Result | Success | Success  | Success  | Success | Success | Success |
+| Total time (ms) | 540 | 1115 | 1661  | 4571 | 6328 | 7208 |
+| Time per request (ms)| 5.400 | 2.230 | 1.661 | 0.914 | 0.791 | 0.721 |
+
+### 4.2 Using MySQL Backend
+
+MySQL Configuration:
+> max connection: 5000              
+> innodb buffer size: 8G
+
+| Concurrency | 100 | 500 | 1000 | 5000 | 8000 | 10000 |
+| :---: | :---: | :---: | :---: | :---: | :---: | :---: |
+| Result | Success | Success  | Success  | Success | Success | Success |
+| Total time (ms) | 765 | 2880  | 4821  | 12712 | 21419 | 22968 |
+| Time per request (ms)| 7.650 | 5.760  | 4.821  | 2.542 | 2.677 | 2.297 |
+
+## 5. Conclusion
+
+![performance in different backends](https://user-images.githubusercontent.com/9171954/27905152-a9bc2a44-6271-11e7-8ddc-16222ee7d3c4.png)
+
+The figure above shows the time per request of HAS authentication for different backends and concurrency levels. As can be seen, HAS completes authentication reliably under high concurrency and performs well, so HAS is a good fit for Hadoop.
+
+The CPU utilization and network IO of the HAS server, with concurrency up to 10000, are shown in the appendix. They indicate that the HAS server is not under heavy load with the MySQL backend.
+
+## 6. Appendix
+
+* CPU Utilization
+
+![cpu utilization](https://user-images.githubusercontent.com/9171954/27905176-bf7ea410-6271-11e7-904e-abd1bf532725.jpg)
+
+* Network IO
+
+![network io](https://user-images.githubusercontent.com/9171954/27905186-c717b784-6271-11e7-96d3-2fd317defd96.jpg)

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/doc/rest-api.md
----------------------------------------------------------------------
diff --git a/has/doc/rest-api.md b/has/doc/rest-api.md
new file mode 100644
index 0000000..085471b
--- /dev/null
+++ b/has/doc/rest-api.md
@@ -0,0 +1,229 @@
+REST API
+==========
+
+## Config API        AuthType:HTTPS
+
+### Set HAS Plugin
+* Submit an HTTP PUT request.
+```
+    https://<host>:<port>/has/v1/conf/setplugin?plugin=<plugin>
+```
+Example:
+```
+    PUT https://<host>:<port>/has/v1/conf/setplugin?plugin=RAM
+    Code:200
+    Content-Type:text/plain
+    Content:
+        HAS plugin set successfully.
+```
+### Configure HAS backend
+* Submit an HTTP PUT request.
+```
+    https://<host>:<port>/has/v1/conf/configkdcbackend?backendType=<backendType>
+        [&dir=<dir>] [&url=<url>] [&user=<user>] [&password=<password>]
+```
+Example:
+```
+    PUT https://<host>:<port>/has/v1/conf/configkdcbackend?backendType=json&dir=/tmp/has/jsonbackend
+    Code:200
+    Content-Type:text/plain
+    Content:
+        Json backend set successfully.
+```
+### Configure HAS KDC
+* Submit an HTTP PUT request.
+```
+    https://<host>:<port>/has/v1/conf/configkdc?realm=<realm>&host=<host>&port=<port>
+```
+Example:
+```
+    PUT https://<host>:<port>/has/v1/conf/configkdc?realm=HADOOP.COM&host=localhost&port=88
+    Code:200
+    Content-Type:text/plain
+    Content:
+        HAS server KDC set successfully.
+```
+### Get HAS krb5 conf
+* Submit an HTTP GET request.
+```
+    https://<host>:<port>/has/v1/getkrb5conf
+```
+Example:
+```   
+    GET https://<host>:<port>/has/v1/getkrb5conf
+    Code:200
+    Content-Disposition:attachment;filename=krb5.conf
+```
+### Get HAS conf
+* Submit an HTTP GET request.
+```
+    https://<host>:<port>/has/v1/gethasconf
+```
+Example:
+```
+    GET https://<host>:<port>/has/v1/gethasconf
+    Code:200
+    Content-Disposition:attachment;filename=has-client.conf
+```
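+
+Since these are plain HTTPS endpoints, they can also be called from Java. A minimal sketch of fetching has-client.conf follows; the host, port, truststore path and password are illustrative (the truststore is the trustAll file from the HTTPS deployment doc):
+
+```Java
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.net.URL;
+import javax.net.ssl.HttpsURLConnection;
+
+public class GetHasConf {
+    public static void main(String[] args) throws Exception {
+        // Trust the HAS server's certificate via the common truststore.
+        System.setProperty("javax.net.ssl.trustStore", "/etc/has/trustAll.jks");
+        System.setProperty("javax.net.ssl.trustStorePassword", "changeit");
+
+        URL url = new URL("https://localhost:8092/has/v1/gethasconf");
+        HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
+        conn.setRequestMethod("GET");
+        try (BufferedReader reader = new BufferedReader(
+                new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
+            String line;
+            while ((line = reader.readLine()) != null) {
+                System.out.println(line);
+            }
+        }
+    }
+}
+```
+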
+## Admin API        AuthType:HTTPS,kerberos  
+### Get HAS principals
+* Submit an HTTP GET request.
+```
+    https://<host>:<port>/has/v1/admin/getprincipals [?exp=<exp>]
+```
+Example:
+```   
+    GET https://<host>:<port>/has/v1/admin/getprincipals
+    Code:200
+    Content-Type:application/json
+    Content:
+        {
+            "result":"success",
+            "msg":"[
+                        \"HTTP\\\/host1@HADOOP.COM\",
+                        \"HTTP\\\/host2@HADOOP.COM\",
+                        \"hbase\\\/host2@HADOOP.COM\",
+                        \"hdfs\\\/host1@HADOOP.COM\",
+                        \"hdfs\\\/host2@HADOOP.COM\",
+                        \"yarn\\\/host1@HADOOP.COM\",
+                        \"yarn\\\/host2@HADOOP.COM\"
+                   ]"
+        }
+```     
+### Add HAS principal
+* Submit an HTTP POST request.
+```
+    https://<host>:<port>/has/v1/admin/addprincipal?principal=<principal> [&password=<password>]
+```
+Example:
+```
+    POST https://<host>:<port>/has/v1/admin/addprincipal?principal=admin
+    Code:200
+    Content-Type:application/json
+    Content:
+        {
+            "result":"success",
+            "msg":"Add principal successfully."
+        }
+```  
+### Rename HAS principal
+* Submit an HTTP POST request.
+```
+    https://<host>:<port>/has/v1/admin/renameprincipal?oldprincipal=<oldprincipal>&newprincipal=<newprincipal>
+```
+Example:
+```
+    POST https://<host>:<port>/has/v1/admin/renameprincipal?oldprincipal=admin&newprincipal=admin/admin
+    Code:200
+    Content-Type:application/json
+    Content:
+        {
+            "result":"success",
+            "msg":"Rename principal successfully."
+        }
+```   
+### Delete HAS principal
+* Submit an HTTP DELETE request.
+```
+    https://<host>:<port>/has/v1/admin/deleteprincipal?principal=<principal>
+```
+Example:
+```
+    DELETE https://<host>:<port>/has/v1/admin/deleteprincipal?principal=admin
+    Code:200
+    Content-Type:application/json
+    Content:
+        {
+            "result":"success",
+            "msg":"Delete principal successfully."
+        }
+```     
+### Create service principals
+* Submit an HTTP PUT request.
+```
+    https://<host>:<port>/has/v1/admin/createprincipals
+    Content-Type:application/json
+```
+Example:
+```   
+    Request:
+    PUT https://<host>:<port>/has/v1/admin/createprincipals
+    Content-Type:application/json
+    Content:
+    {
+        HOSTS: [
+            {"name":"host1","hostRoles":"HDFS"},    //hostRoles segmentation by ,
+            {"name":"host2","hostRoles":"HDFS,HBASE"}
+        ] 
+    }
+    Response:
+    Code:200
+    Content-Type:application/json
+    Content:
+        {
+            "result":"success",
+            "msg":"Already add princ :hdfs\/host1@HADOOP.COM
+                   Already add princ :yarn\/host1@HADOOP.COM
+                   Already add princ :hdfs\/host2@HADOOP.COM
+                   Already add princ :yarn\/host2@HADOOP.COM
+                   Already add princ :hbase\/host2@HADOOP.COM"
+        }
+```
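+
+For reference, a minimal Java sketch of submitting the hosts JSON above (host, port and TLS system properties as in the earlier GET example; illustrative only — note the Admin API is additionally protected by Kerberos SPNEGO, which this sketch omits):
+
+```Java
+import java.io.OutputStream;
+import java.net.URL;
+import javax.net.ssl.HttpsURLConnection;
+
+public class CreatePrincipals {
+    public static void main(String[] args) throws Exception {
+        System.setProperty("javax.net.ssl.trustStore", "/etc/has/trustAll.jks");
+        System.setProperty("javax.net.ssl.trustStorePassword", "changeit");
+
+        String body = "{\"HOSTS\": ["
+                + "{\"name\":\"host1\",\"hostRoles\":\"HDFS\"},"
+                + "{\"name\":\"host2\",\"hostRoles\":\"HDFS,HBASE\"}]}";
+
+        URL url = new URL("https://localhost:8092/has/v1/admin/createprincipals");
+        HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
+        conn.setRequestMethod("PUT");
+        conn.setRequestProperty("Content-Type", "application/json");
+        conn.setDoOutput(true);
+        try (OutputStream out = conn.getOutputStream()) {
+            out.write(body.getBytes("UTF-8"));
+        }
+        System.out.println("HTTP " + conn.getResponseCode());
+    }
+}
+```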
+### Export service keytabs
+* Submit an HTTP GET request.
+```
+    https://<host>:<port>/has/v1/admin/exportkeytabs?host=<host> [&role=<role>]
+```
+Example:
+```
+    GET https://<host>:<port>/has/v1/admin/exportkeytabs?host=host1
+    Code:200
+    Content-Disposition:attachment;filename=keytab.zip
+```
+## User API        AuthType:HTTPS
+
+### Start HAS server
+* Submit an HTTP GET request.
+```
+    https://<host>:<port>/has/v1/kdcstart
+```
+Example:
+```
+    GET https://<host>:<port>/has/v1/kdcstart
+    Code:200
+    Content-Type:application/json
+    Content:
+        {
+            "result":"success",
+            "msg":"Succeed in starting KDC server."
+        }
+```
+### Init HAS server
+* Submit an HTTP GET request.
+```
+    https://<host>:<port>/has/v1/kdcinit
+```
+Example:
+``` 
+    GET https://<host>:<port>/has/v1/kdcinit
+    Code:200
+    Content-Disposition:attachment;filename=admin.keytab
+```
+### Get hostRoles list
+* Submit an HTTP GET request.
+```
+    https://<host>:<port>/has/v1/hostroles
+```
+Example:
+```
+    GET https://<host>:<port>/has/v1/hostroles
+    Code:200
+    Content-Type:application/json
+    
+    Content:
+    [
+        {"HostRole":"HDFS","PrincipalNames":["HTTP","hdfs"]},
+        {"HostRole":"YARN","PrincipalNames":["yarn"]},
+        {"HostRole":"HBASE","PrincipalNames":["hbase"]},
+        {"HostRole":"ZOOKEEPER","PrincipalNames":["zookeeper"]}
+    ]
+```

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-client/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-client/pom.xml b/has/has-client/pom.xml
new file mode 100644
index 0000000..c5cabe9
--- /dev/null
+++ b/has/has-client/pom.xml
@@ -0,0 +1,84 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>has-project</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>has-client</artifactId>
+  <description>HAS client</description>
+  <name>HAS client</name>
+  <packaging>jar</packaging>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-core</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-server-api-all</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-util</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-client</artifactId>
+      <version>1.19</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-json</artifactId>
+      <version>1.19</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-codec</groupId>
+      <artifactId>commons-codec</artifactId>
+      <scope>compile</scope>
+      <version>${commons-codec.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>${slf4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>has-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-simplekdc</artifactId>
+      <version>${kerby.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-text</artifactId>
+      <version>1.1</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>4.12</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.containers</groupId>
+      <artifactId>jersey-container-servlet-core</artifactId>
+      <version>2.17</version>
+    </dependency>
+  </dependencies>
+
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-client/src/main/java/org/apache/hadoop/has/client/AbstractHasClientPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/AbstractHasClientPlugin.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/AbstractHasClientPlugin.java
new file mode 100644
index 0000000..9ff9749
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/hadoop/has/client/AbstractHasClientPlugin.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.client;
+
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.kerby.kerberos.kerb.KrbRuntime;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class AbstractHasClientPlugin implements HasClientPlugin {
+    public static final Logger LOG = LoggerFactory.getLogger(AbstractHasClientPlugin.class);
+
+    protected abstract void doLogin(AuthToken token) throws HasLoginException;
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public AuthToken login(HasConfig conf) throws HasLoginException {
+
+        AuthToken authToken = KrbRuntime.getTokenProvider("JWT").createTokenFactory().createToken();
+
+        doLogin(authToken);
+
+        return authToken;
+    }
+
+}


[06/10] directory-kerby git commit: Add the HAS project to Kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/kdcinit.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/kdcinit.sh b/has/has-dist/bin/kdcinit.sh
new file mode 100644
index 0000000..f6e30c3
--- /dev/null
+++ b/has/has-dist/bin/kdcinit.sh
@@ -0,0 +1,56 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+CONF_DIR=$1
+APP_MAIN=org.apache.hadoop.has.tool.client.kdcinit.HasInitTool
+
+# Reset HAS_CONF_DIR if CONF_DIR not null
+if [ "$CONF_DIR" != "" ]; then
+  if [ ! -d "$CONF_DIR" ]; then
+    echo "[ERROR] ${CONF_DIR} is not a directory"
+    echo "Usage: sh bin/kdcinit.sh <conf_dir>"
+    exit 1
+  fi
+else
+  if [ "$HAS_CONF_DIR" != "" ] && [ -d "$HAS_CONF_DIR" ]; then
+    CONF_DIR=${HAS_CONF_DIR}
+  else
+    echo "[ERROR] HAS_CONF_DIR is null or not a directory"
+    exit
+  fi
+fi
+
+# Load HAS environment variables
+if [ -f "${CONF_DIR}/has-env.sh" ]; then
+  . "${CONF_DIR}/has-env.sh"
+fi
+
+# Get HAS_HOME directory
+bin=`dirname "$0"`
+HAS_HOME=`cd ${bin}/..; pwd`
+cd ${HAS_HOME}
+
+for var in $*; do
+  if [ X"$var" = X"-D" ]; then
+    DEBUG="-Xdebug -Xrunjdwp:transport=dt_socket,address=8001,server=y,suspend=y"
+  fi
+done
+
+echo "[INFO] conf_dir=$CONF_DIR"
+HAS_OPTS="-DHAS_LOGFILE=kdcinit"
+
+java ${DEBUG} -classpath target/lib/*:. ${HAS_OPTS} ${APP_MAIN} ${CONF_DIR}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/kinit.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/kinit.sh b/has/has-dist/bin/kinit.sh
new file mode 100644
index 0000000..3d605d6
--- /dev/null
+++ b/has/has-dist/bin/kinit.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+APP_MAIN=org.apache.hadoop.has.tool.client.kinit.KinitTool
+
+# Get HAS_HOME directory
+bin=`dirname "$0"`
+HAS_HOME=`cd ${bin}/..; pwd`
+cd ${HAS_HOME}
+
+for var in $*; do
+  if [ X"$var" = X"-D" ]; then
+    DEBUG="-Xdebug -Xrunjdwp:transport=dt_socket,address=8005,server=y,suspend=y"
+  else
+    args="$args $var"
+  fi
+done
+
+java $DEBUG \
+-classpath target/lib/*:. \
+-DHAS_LOGFILE=kinit \
+${APP_MAIN} $args
+

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/klist.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/klist.sh b/has/has-dist/bin/klist.sh
new file mode 100644
index 0000000..0643ae7
--- /dev/null
+++ b/has/has-dist/bin/klist.sh
@@ -0,0 +1,37 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+APP_MAIN=org.apache.hadoop.has.tool.client.klist.KlistTool
+
+# Get HAS_HOME directory
+bin=`dirname "$0"`
+HAS_HOME=`cd ${bin}/..; pwd`
+cd ${HAS_HOME}
+
+for var in $*; do
+  if [ X"$var" = X"-D" ]; then
+    DEBUG="-Xdebug -Xrunjdwp:transport=dt_socket,address=8006,server=y,suspend=y"
+  else
+    args="$args $var"
+  fi
+done
+
+java $DEBUG \
+-classpath target/lib/*:. \
+-DHAS_LOGFILE=klist \
+${APP_MAIN} $args

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/login-test.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/login-test.sh b/has/has-dist/bin/login-test.sh
new file mode 100644
index 0000000..f26b1df
--- /dev/null
+++ b/has/has-dist/bin/login-test.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+APP_MAIN=org.apache.hadoop.has.tool.client.hclient.HasClientLoginTool
+
+# Get HAS_HOME directory
+bin=`dirname "$0"`
+HAS_HOME=`cd ${bin}/..; pwd`
+cd ${HAS_HOME}
+
+for var in $*; do
+  if [ X"$var" = X"-D" ]; then
+    DEBUG="-Xdebug -Xrunjdwp:transport=dt_socket,address=8004,server=y,suspend=y"
+  else
+    args="$args $var"
+  fi
+done
+
+java ${DEBUG} -classpath target/lib/*:. ${APP_MAIN} ${args}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/quick-start.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/quick-start.sh b/has/has-dist/bin/quick-start.sh
new file mode 100644
index 0000000..fbe6812
--- /dev/null
+++ b/has/has-dist/bin/quick-start.sh
@@ -0,0 +1,68 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+BASE_DIR=$(cd `dirname $0`/..; pwd)
+
+# 1. Start HAS server
+echo "Start HAS server..."
+sudo sh $BASE_DIR/bin/start-has.sh $BASE_DIR/conf $BASE_DIR/conf &
+sleep 3s
+cat nohup.log
+
+# 2. Config Backend
+echo "Config Backend..."
+curl -X PUT "http://localhost:8091/has/v1/conf/configKdcBackend?backendType=json&dir=/tmp/has/jsonbackend"
+sleep 2s
+
+# 3. Set Realm
+echo "Set Realm..."
+curl -X PUT "http://localhost:8091/has/v1/conf/setKdcRealm?realm=ALIYUN.COM"
+sleep 2s
+
+# 4. Start HAS
+curl -X GET "http://localhost:8091/has/v1/kdcstart"
+sleep 2s
+
+# 5. Init HAS
+echo "Init HAS..."
+curl -o admin.keytab "http://localhost:8091/has/v1/kdcinit"
+sleep 2s
+
+# 6. Create Principals
+echo "Create Principals..."
+echo \
+{\
+    HOSTS: [\
+        \{\"name\":\"nn\",\"hostRoles\":\"HDFS,YARN,HBASE,ZOOKEEPER\"\}, \
+        \{\"name\":\"dn1\",\"hostRoles\":\"HDFS,YARN,HBASE,ZOOKEEPER\"\}, \
+        \{\"name\":\"dn2\",\"hostRoles\":\"HDFS,YARN,HBASE,ZOOKEEPER\"\} \
+    ] \
+\} > hosts.txt
+curl -T hosts.txt "http://localhost:8091/has/v1/admin/createprincipals"
+sleep 2s
+
+# 7. Get Host Roles List
+echo "Get host roles list..."
+curl -X GET "http://localhost:8091/has/v1/hostroles"
+sleep 2s
+
+# 8. Export keytab files
+echo "Export keytab files..."
+curl -o nn_keytab.zip "http://localhost:8091/has/v1/admin/exportkeytabs?host=nn"
+curl -o dn1_keytab.zip "http://localhost:8091/has/v1/admin/exportkeytabs?host=dn1"
+curl -o dn2_keytab.zip "http://localhost:8091/has/v1/admin/exportkeytabs?host=dn2"
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/start-has.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/start-has.sh b/has/has-dist/bin/start-has.sh
new file mode 100644
index 0000000..95a6913
--- /dev/null
+++ b/has/has-dist/bin/start-has.sh
@@ -0,0 +1,115 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+usage()
+{
+  echo "Usage: sh bin/start-has.sh <conf_dir> <working_dir>"
+  echo "    Example:"
+  echo "        sh bin/start-has.sh conf work"
+  exit
+}
+
+CONF_DIR=$1
+WORK_DIR=$2
+pid=/tmp/has.pid # Pid file to save pid numbers
+APP_MAIN=org.apache.hadoop.has.server.HasServer
+
+# Reset HAS_CONF_DIR and HAS_WORK_DIR if CONF_DIR or WORK_DIR not null
+if [ "$CONF_DIR" != "" ]; then
+  if [ ! -d "$CONF_DIR" ]; then
+    echo "[ERROR] ${CONF_DIR} is not a directory"
+    usage
+  fi
+else
+  if [ "$HAS_CONF_DIR" != "" ] && [ -d "$HAS_CONF_DIR" ]; then
+    CONF_DIR=${HAS_CONF_DIR}
+  else
+    echo "[ERROR] HAS_CONF_DIR is null or not a directory"
+    exit
+  fi
+fi
+
+# Load HAS environment variables
+if [ -f "${CONF_DIR}/has-env.sh" ]; then
+  . "${CONF_DIR}/has-env.sh"
+fi
+
+if [ "${WORK_DIR}" != "" ]; then
+  if [ ! -d "$WORK_DIR" ]; then
+    echo "[ERROR] ${WORK_DIR} is not a directory"
+    usage
+  fi
+else
+  if [ "$HAS_WORK_DIR" != "" ] && [ -d "$HAS_WORK_DIR" ]; then
+    WORK_DIR=${HAS_WORK_DIR}
+  else
+    echo "[ERROR] HAS_WORK_DIR is null or not a directory"
+    exit
+  fi
+fi
+
+# Get HAS_HOME directory
+bin=`dirname "$0"`
+HAS_HOME=`cd ${bin}/..; pwd`
+cd ${HAS_HOME}
+
+for var in $*; do
+  if [ X"$var" = X"-D" ]; then
+    DEBUG="-Xdebug -Xrunjdwp:transport=dt_socket,address=8000,server=y,suspend=n"
+  fi
+done
+args="$CONF_DIR $WORK_DIR"
+
+echo "[INFO] conf_dir=$CONF_DIR"
+echo "[INFO] work_dir=$WORK_DIR"
+
+HAS_OPTS="$HAS_JVM_OPTS -DHAS_LOGFILE=has"
+
+# Print a warning if has servers are already running
+if [ -f ${pid} ]; then
+  active=()
+  while IFS='' read -r p || [ -n "$p" ]; do
+    kill -0 ${p} >/dev/null 2>&1
+    if [ $? -eq 0 ]; then
+      active+=(${p})
+    fi
+  done < "$pid"
+
+  count="${#active[@]}"
+
+  if [ "$count" -gt 0 ]; then
+    echo "[WARN] ${count} instance(s) of HAS server are already running."
+  fi
+fi
+
+echo "Starting HAS server..."
+
+# Start HAS server
+java ${DEBUG} -classpath target/lib/*:. ${HAS_OPTS} ${APP_MAIN} -start ${args} > /dev/null 2>&1 &
+
+mypid=$!
+
+# Add mypid to pid file if start successfully
+sleep 3
+if [ "$mypid" -gt 0 ] && kill -0 "$mypid" > /dev/null 2>&1; then
+  echo ${mypid} >> ${pid}
+  echo "[SUCCESS] HAS server (pid: ${mypid}) has been started."
+else
+  echo "[ERROR] Failed to start HAS server."
+  exit 1
+fi

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/stop-has.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/stop-has.sh b/has/has-dist/bin/stop-has.sh
new file mode 100644
index 0000000..6ca414d
--- /dev/null
+++ b/has/has-dist/bin/stop-has.sh
@@ -0,0 +1,75 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+OPERATION=$1
+pid=/tmp/has.pid # Pid file
+
+stop()
+{
+  if kill -0 ${to_stop} > /dev/null 2>&1; then
+    echo "Stopping HAS server (pid: ${to_stop})..."
+    kill ${to_stop}
+    sleep 5
+    if kill -0 ${to_stop} > /dev/null 2>&1; then
+      echo "[WARN] HAS server is still alive after 5 seconds; trying to kill it by force."
+      kill -9 ${to_stop}
+    else
+      echo "[SUCCESS] HAS server has been stopped."
+    fi
+  else
+    echo "[INFO] Skipping HAS server (pid: ${to_stop}), because it is not running anymore."
+  fi
+}
+
+case ${OPERATION} in
+
+  (all)
+    if [ -f "$pid" ]; then
+      mv ${pid} ${pid}.tmp
+      while read -r to_stop; do
+        stop
+      done < ${pid}.tmp
+      rm ${pid}.tmp
+    else
+      echo "[INFO] No HAS server to stop."
+    fi
+  ;;
+
+  (*)
+    if [ -f "$pid" ]; then
+      # Get latest pid number in pid file
+      to_stop=$(tail -n 1 ${pid})
+
+      if [ -z "$to_stop" ]; then
+        rm ${pid} # If $to_stop is null, delete the pid file
+        echo "[INFO] No HAS server to stop."
+      else
+        sed \$d ${pid} > ${pid}.tmp
+        if [ $(wc -l < ${pid}.tmp) -eq 0 ]; then
+          rm ${pid}.tmp ${pid} # If all stopped, clean up pid files
+        else
+          mv ${pid}.tmp ${pid}
+        fi
+        stop
+      fi
+
+    else
+      echo "[INFO] No HAS server to stop."
+    fi
+  ;;
+esac

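Note: invoked with no argument, the script stops only the most recently
started server recorded in /tmp/has.pid (the last line of the pid file);
the literal argument "all" stops every instance listed there. For example:

    sh bin/stop-has.sh        # stop the latest instance
    sh bin/stop-has.sh all    # stop all recorded instances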
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/conf/backend.conf
----------------------------------------------------------------------
diff --git a/has/has-dist/conf/backend.conf b/has/has-dist/conf/backend.conf
new file mode 100644
index 0000000..99c9d0a
--- /dev/null
+++ b/has/has-dist/conf/backend.conf
@@ -0,0 +1,23 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+kdc_identity_backend = org.apache.kerby.kerberos.kdc.identitybackend.JsonIdentityBackend
+backend.json.dir = /tmp/has/jsonbackend
+mysql_url = jdbc:mysql://127.0.0.1:3306/mysqlbackend
+mysql_user = root
+mysql_password = passwd

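Note: the sample above selects the JSON backend, so the mysql_* keys are
inert until the backend is switched. A sketch of the equivalent MySQL
setup, assuming the MySQLIdentityBackend class referenced by HasServer in
this commit and the Connector/J 5.1 driver class from the has-server pom:

    kdc_identity_backend = org.apache.hadoop.has.server.kdc.MySQLIdentityBackend
    mysql_driver = com.mysql.jdbc.Driver
    mysql_url = jdbc:mysql://127.0.0.1:3306/mysqlbackend
    mysql_user = root
    mysql_password = passwd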
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/conf/hadmin.conf
----------------------------------------------------------------------
diff --git a/has/has-dist/conf/hadmin.conf b/has/has-dist/conf/hadmin.conf
new file mode 100644
index 0000000..e950aea
--- /dev/null
+++ b/has/has-dist/conf/hadmin.conf
@@ -0,0 +1,6 @@
+[HAS]
+    https_host = plusplus-desktop
+    https_port = 8092
+    admin_keytab = /etc/has/admin.keytab
+    admin_keytab_principal = kadmin/HADOOP.COM@HADOOP.COM
+    filter_auth_type = kerberos

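Note: admin_keytab should point at the keytab that HasServer.initKdcServer()
exports as admin.keytab under the work directory, so it has to be copied
into place before the admin tool is used, e.g. (paths are illustrative):

    cp work/admin.keytab /etc/has/admin.keytab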
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/conf/has-env.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/conf/has-env.sh b/has/has-dist/conf/has-env.sh
new file mode 100644
index 0000000..d390ee9
--- /dev/null
+++ b/has/has-dist/conf/has-env.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Set HAS environment variables here.
+
+###
+# Specify the JVM options to be used when starting HAS server.
+# These options will be appended to the options specified as HAS_OPTS
+#
+# export HAS_JVM_OPTS=""
+
+# HAS work directory
+#
+# export HAS_WORK_DIR=""

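For example, uncommented settings might look like this (values are
illustrative; HAS_JVM_OPTS feeds into HAS_OPTS, and HAS_WORK_DIR is the
fallback used by bin/start-has.sh when no working directory is passed):

    export HAS_JVM_OPTS="-Xmx1g"
    export HAS_WORK_DIR=/tmp/has/work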
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/conf/has-server.conf
----------------------------------------------------------------------
diff --git a/has/has-dist/conf/has-server.conf b/has/has-dist/conf/has-server.conf
new file mode 100644
index 0000000..5fb801e
--- /dev/null
+++ b/has/has-dist/conf/has-server.conf
@@ -0,0 +1,27 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[HAS]
+  https_host = plusplus-desktop
+  https_port = 8092
+  filter_auth_type = kerberos
+  enable_conf = false
+  ssl_client_cert = /etc/has/cert-signed
+  
+[PLUGIN]
+  auth_type = RAM

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/conf/kdc.conf
----------------------------------------------------------------------
diff --git a/has/has-dist/conf/kdc.conf b/has/has-dist/conf/kdc.conf
new file mode 100644
index 0000000..a2132ac
--- /dev/null
+++ b/has/has-dist/conf/kdc.conf
@@ -0,0 +1,23 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[kdcdefaults]
+  kdc_host = plusplus-desktop
+  kdc_udp_port = 88
+  kdc_tcp_port = 88
+  kdc_realm = HADOOP.COM

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/conf/krb5.conf
----------------------------------------------------------------------
diff --git a/has/has-dist/conf/krb5.conf b/has/has-dist/conf/krb5.conf
new file mode 100644
index 0000000..6c64cf5
--- /dev/null
+++ b/has/has-dist/conf/krb5.conf
@@ -0,0 +1,29 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[libdefaults]
+    kdc_realm = HADOOP.COM
+    default_realm = HADOOP.COM
+    udp_preference_limit = 4096
+    kdc_tcp_port = 88
+    kdc_udp_port = 88
+    ticket_lifetime = 60
+[realms]
+    HADOOP.COM = {
+        kdc = plusplus-desktop:88
+    }

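Note: HasServer.generateKrb5Conf() in this commit regenerates this file from
a bundled /krb5.conf.template resource by substituting _REALM_, _PORT_,
_UDP_LIMIT_ and _KDCS_. A sketch of what such a template plausibly looks
like (the template itself is not part of this hunk):

    [libdefaults]
        kdc_realm = _REALM_
        default_realm = _REALM_
        udp_preference_limit = _UDP_LIMIT_
        kdc_tcp_port = _PORT_
        kdc_udp_port = _PORT_
    [realms]
        _REALM_ = {
    _KDCS_
        }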
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/log4j.properties
----------------------------------------------------------------------
diff --git a/has/has-dist/log4j.properties b/has/has-dist/log4j.properties
new file mode 100644
index 0000000..1b36fec
--- /dev/null
+++ b/has/has-dist/log4j.properties
@@ -0,0 +1,28 @@
+#############################################################################
+#    Licensed to the Apache Software Foundation (ASF) under one or more
+#    contributor license agreements.  See the NOTICE file distributed with
+#    this work for additional information regarding copyright ownership.
+#    The ASF licenses this file to You under the Apache License, Version 2.0
+#    (the "License"); you may not use this file except in compliance with
+#    the License.  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS,
+#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#    See the License for the specific language governing permissions and
+#    limitations under the License.
+#############################################################################
+log4j.rootLogger=INFO,R
+
+log4j.appender.STDOUT=org.apache.log4j.ConsoleAppender
+log4j.appender.STDOUT.layout=org.apache.log4j.PatternLayout
+log4j.appender.STDOUT.layout.ConversionPattern=Logger-->%5p{%F:%L}-%m%n
+
+log4j.appender.R=org.apache.log4j.DailyRollingFileAppender
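+# Note: HAS_LOGFILE is supplied by bin/start-has.sh via -DHAS_LOGFILE=has.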
+log4j.appender.R.File=logs/${HAS_LOGFILE}.log
+log4j.appender.R.layout=org.apache.log4j.PatternLayout
+log4j.appender.R.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %5p{%F:%L}-%m%n
+

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-dist/pom.xml b/has/has-dist/pom.xml
new file mode 100644
index 0000000..81eccc5
--- /dev/null
+++ b/has/has-dist/pom.xml
@@ -0,0 +1,86 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>has-project</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>has-dist</artifactId>
+  <description>HAS dist</description>
+  <name>HAS dist</name>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>has-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>has-client</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>has-server</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>has-client-tool</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>has-server-tool</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>target/lib</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <appendAssemblyId>false</appendAssemblyId>
+          <descriptors>
+            <descriptor>assembly.xml</descriptor>
+          </descriptors>
+        </configuration>
+        <executions>
+          <execution>
+            <id>make-assembly</id>
+            <phase>package</phase>
+            <goals>
+              <goal>single</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+</project>
\ No newline at end of file

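Note: the copy-dependencies execution above drops all runtime jars into
target/lib at package time, which matches the "target/lib/*:." classpath
that bin/start-has.sh expects. Building the dist module is therefore the
standard Maven invocation, run from has/has-dist:

    mvn package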
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/webapps/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/has/has-dist/webapps/WEB-INF/web.xml b/has/has-dist/webapps/WEB-INF/web.xml
new file mode 100644
index 0000000..b13cb1f
--- /dev/null
+++ b/has/has-dist/webapps/WEB-INF/web.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License. See accompanying LICENSE file.
+-->
+<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
+
+</web-app>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/webapps/has/index.html
----------------------------------------------------------------------
diff --git a/has/has-dist/webapps/has/index.html b/has/has-dist/webapps/has/index.html
new file mode 100644
index 0000000..6f80950
--- /dev/null
+++ b/has/has-dist/webapps/has/index.html
@@ -0,0 +1,24 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
+<title>HAS Administration</title>
+</head>
+</html>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-plugins/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-plugins/pom.xml b/has/has-plugins/pom.xml
new file mode 100644
index 0000000..3cdc12a
--- /dev/null
+++ b/has/has-plugins/pom.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>has-project</artifactId>
+        <groupId>org.apache.hadoop</groupId>
+        <version>1.0.0-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>has-plugins</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>has-common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.aliyun</groupId>
+            <artifactId>aliyun-java-sdk-ram</artifactId>
+            <version>2.0.7</version>
+        </dependency>
+        <dependency>
+            <groupId>com.aliyun</groupId>
+            <artifactId>aliyun-java-sdk-core</artifactId>
+            <version>2.2.3</version>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.12</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>has-client</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>has-server</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+    </dependencies>
+
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.client.HasClientPlugin
----------------------------------------------------------------------
diff --git a/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.client.HasClientPlugin b/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.client.HasClientPlugin
new file mode 100644
index 0000000..9f6edbc
--- /dev/null
+++ b/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.client.HasClientPlugin
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+org.apache.hadoop.has.plugins.client.aliyun.AliyunHasClientPlugin

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.server.HasServerPlugin
----------------------------------------------------------------------
diff --git a/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.server.HasServerPlugin b/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.server.HasServerPlugin
new file mode 100644
index 0000000..fa342e5
--- /dev/null
+++ b/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.server.HasServerPlugin
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+org.apache.hadoop.has.plugins.server.aliyun.AliyunHasServerPlugin

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasClientPluginRegistry.java
----------------------------------------------------------------------
diff --git a/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasClientPluginRegistry.java b/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasClientPluginRegistry.java
new file mode 100644
index 0000000..4c60250
--- /dev/null
+++ b/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasClientPluginRegistry.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.plugins;
+
+import org.apache.hadoop.has.client.HasClientPluginRegistry;
+import org.apache.hadoop.has.common.HasException;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.Set;
+
+public class TestHasClientPluginRegistry {
+
+  @Test
+  public void testInit() {
+    Set<String> pluginsNames = HasClientPluginRegistry.registeredPlugins();
+    Assert.assertTrue(pluginsNames.size() > 0);
+  }
+
+  @Test
+  public void testCreatePlugin() throws HasException {
+    Assert.assertNotNull(HasClientPluginRegistry.createPlugin("RAM"));
+    Set<String> pluginNames = HasClientPluginRegistry.registeredPlugins();
+    for (String name : pluginNames) {
+      HasClientPluginRegistry.createPlugin(name);
+    }
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasServerPluginRegistry.java
----------------------------------------------------------------------
diff --git a/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasServerPluginRegistry.java b/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasServerPluginRegistry.java
new file mode 100644
index 0000000..78f307f
--- /dev/null
+++ b/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasServerPluginRegistry.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.plugins;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.HasServerPluginRegistry;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.Set;
+
+public class TestHasServerPluginRegistry {
+
+  @Test
+  public void testInit() {
+    Set<String> pluginsNames = HasServerPluginRegistry.registeredPlugins();
+    Assert.assertTrue(pluginsNames.size() > 0);
+  }
+
+  @Test
+  public void testCreatePlugin() throws HasException {
+    Assert.assertNotNull(HasServerPluginRegistry.createPlugin("RAM"));
+    Set<String> pluginNames = HasServerPluginRegistry.registeredPlugins();
+    for (String name : pluginNames) {
+      HasServerPluginRegistry.createPlugin(name);
+    }
+  }
+}

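Note: both registry tests depend on the META-INF/services registrations
above being on the test classpath. To run just these tests with the
standard Surefire -Dtest flag:

    mvn test -Dtest=TestHasClientPluginRegistry,TestHasServerPluginRegistry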
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-server/pom.xml b/has/has-server/pom.xml
new file mode 100644
index 0000000..30a4aa8
--- /dev/null
+++ b/has/has-server/pom.xml
@@ -0,0 +1,118 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>has-project</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>has-server</artifactId>
+  <description>HAS server</description>
+  <name>HAS server</name>
+  <packaging>jar</packaging>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.bouncycastle</groupId>
+      <artifactId>bcprov-jdk15on</artifactId>
+      <version>1.58</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.5</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-dbutils</groupId>
+      <artifactId>commons-dbutils</artifactId>
+      <version>1.6</version>
+    </dependency>
+    <dependency>
+      <groupId>mysql</groupId>
+      <artifactId>mysql-connector-java</artifactId>
+      <version>5.1.42</version>
+    </dependency>
+    <dependency>
+      <groupId>com.h2database</groupId>
+      <artifactId>h2</artifactId>
+      <version>1.4.196</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerby-config</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-identity</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-core</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-server</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.directory.server</groupId>
+      <artifactId>apacheds-core-api</artifactId>
+      <version>2.0.0-M23</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-server-api-all</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerby-kdc</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>json-backend</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>token-provider</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>3.0.0-alpha2</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>4.11</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-client</artifactId>
+      <version>1.19</version>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.core</groupId>
+      <artifactId>jersey-common</artifactId>
+      <version>RELEASE</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>has-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java
new file mode 100644
index 0000000..b4cd5d6
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server;
+
+import org.apache.kerby.kerberos.kerb.KrbRuntime;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class AbstractHasServerPlugin implements HasServerPlugin {
+
+    public static final Logger LOG = LoggerFactory.getLogger(AbstractHasServerPlugin.class);
+
+    protected abstract void doAuthenticate(AuthToken userToken, AuthToken authToken)
+        throws HasAuthenException;
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public AuthToken authenticate(AuthToken userToken) throws HasAuthenException {
+
+        AuthToken authToken = KrbRuntime.getTokenProvider("JWT").createTokenFactory().createToken();
+
+        doAuthenticate(userToken, authToken);
+
+        return authToken;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java
new file mode 100644
index 0000000..14df580
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server;
+
+import org.apache.hadoop.has.common.HasException;
+
+public class HasAuthenException extends HasException {
+    private static final long serialVersionUID = 171016915395892939L;
+
+    public HasAuthenException(Throwable cause) {
+        super(cause);
+    }
+
+    public HasAuthenException(String message) {
+        super(message);
+    }
+
+    public HasAuthenException(String message, Throwable cause) {
+        super(message, cause);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java
new file mode 100644
index 0000000..cb22b8e
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java
@@ -0,0 +1,702 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server;
+
+import org.apache.commons.dbutils.DbUtils;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.server.web.WebConfigKey;
+import org.apache.hadoop.has.server.web.WebServer;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.kerby.kerberos.kdc.impl.NettyKdcServerImpl;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadmin;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadminImpl;
+import org.apache.kerby.kerberos.kerb.client.ClientUtil;
+import org.apache.kerby.kerberos.kerb.client.KrbConfig;
+import org.apache.kerby.kerberos.kerb.client.KrbSetting;
+import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
+import org.apache.kerby.kerberos.kerb.identity.backend.IdentityBackend;
+import org.apache.kerby.kerberos.kerb.server.KdcServer;
+import org.apache.kerby.kerberos.kerb.server.KdcUtil;
+import org.apache.kerby.util.IOUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Pattern;
+
+/**
+ * The HAS KDC server implementation.
+ */
+public class HasServer {
+    public static final Logger LOG = LoggerFactory.getLogger(HasServer.class);
+
+    private static HasServer server = null;
+
+    private KrbSetting krbSetting;
+    private KdcServer kdcServer;
+    private WebServer webServer;
+    private File confDir;
+    private File workDir;
+    private String kdcHost;
+    private HasConfig hasConfig;
+
+    public HasServer(File confDir) throws KrbException {
+        this.confDir = confDir;
+    }
+
+    private void setConfDir(File confDir) {
+        this.confDir = confDir;
+    }
+
+    public File getConfDir() {
+        return confDir;
+    }
+
+    public File getWorkDir() {
+        return workDir;
+    }
+
+    public void setWorkDir(File workDir) {
+        this.workDir = workDir;
+    }
+
+    public void setKdcHost(String host) {
+        this.kdcHost = host;
+    }
+
+    public String getKdcHost() {
+        return kdcHost;
+    }
+
+    public KrbSetting getKrbSetting() {
+        return krbSetting;
+    }
+
+    public KdcServer getKdcServer() {
+        return kdcServer;
+    }
+
+    public WebServer getWebServer() {
+        return webServer;
+    }
+
+    public void setWebServer(WebServer webServer) {
+        this.webServer = webServer;
+    }
+
+    public void startKdcServer() throws HasException {
+        BackendConfig backendConfig;
+        try {
+            backendConfig = KdcUtil.getBackendConfig(getConfDir());
+        } catch (KrbException e) {
+            throw new HasException("Failed to get backend config. " + e);
+        }
+        String backendJar = backendConfig.getString("kdc_identity_backend");
+        if ("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend".equals(backendJar)) {
+            updateKdcConf();
+        }
+        try {
+            kdcServer = new KdcServer(confDir);
+        } catch (KrbException e) {
+            throw new HasException("Failed to create KdcServer. " + e);
+        }
+        kdcServer.setWorkDir(workDir);
+        kdcServer.setInnerKdcImpl(new NettyKdcServerImpl(kdcServer.getKdcSetting()));
+        try {
+            kdcServer.init();
+        } catch (KrbException e) {
+            LOG.error("Errors occurred when init has kdc server:  " + e.getMessage());
+            throw new HasException("Errors occurred when init has kdc server:  " + e.getMessage());
+        }
+
+        KrbConfig krbConfig = null;
+        try {
+            krbConfig = ClientUtil.getConfig(confDir);
+        } catch (KrbException e) {
+            throw new HasException("Errors occurred when getting the config from conf dir. "
+                + e.getMessage());
+        }
+        if (krbConfig == null) {
+            krbConfig = new KrbConfig();
+        }
+        this.krbSetting = new KrbSetting(krbConfig);
+        try {
+            kdcServer.start();
+        } catch (KrbException e) {
+            throw new HasException("Failed to start kdc server. " + e);
+        }
+        try {
+            HasUtil.setEnableConf(new File(confDir, "has-server.conf"), "false");
+        } catch (Exception e) {
+            throw new HasException("Failed to enable conf. " + e);
+        }
+        setHttpFilter();
+    }
+
+    private void setHttpFilter() throws HasException {
+        File httpKeytabFile = new File(workDir, "http.keytab");
+        LocalKadmin kadmin = new LocalKadminImpl(kdcServer.getKdcSetting(),
+            kdcServer.getIdentityService());
+        createHttpPrincipal(kadmin);
+        try {
+            kadmin.exportKeytab(httpKeytabFile, getHttpPrincipal());
+        } catch (KrbException e) {
+            throw new HasException("Failed to export keytab: " + e.getMessage());
+        }
+        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE,
+            hasConfig.getFilterAuthType());
+        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+            getHttpPrincipal());
+        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
+            httpKeytabFile.getPath());
+        webServer.defineFilter();
+    }
+
+    public File initKdcServer() throws KrbException {
+        File adminKeytabFile = new File(workDir, "admin.keytab");
+        LocalKadmin kadmin = new LocalKadminImpl(kdcServer.getKdcSetting(),
+            kdcServer.getIdentityService());
+        if (adminKeytabFile.exists()) {
+            throw new KrbException("KDC server is already initialized.");
+        }
+        kadmin.createBuiltinPrincipals();
+        kadmin.exportKeytab(adminKeytabFile, kadmin.getKadminPrincipal());
+        System.out.println("The keytab for kadmin principal "
+            + " has been exported to the specified file "
+            + adminKeytabFile.getAbsolutePath() + ", please safely keep it, "
+            + "in order to use kadmin tool later");
+
+        return adminKeytabFile;
+    }
+
+    public void createHttpPrincipal(LocalKadmin kadmin) throws HasException {
+        String httpPrincipal = getHttpPrincipal();
+        IdentityBackend backend = kdcServer.getIdentityService();
+        try {
+            if (backend.getIdentity(httpPrincipal) == null) {
+                kadmin.addPrincipal(httpPrincipal);
+            } else {
+                LOG.info("The http principal already exists in backend.");
+            }
+        } catch (KrbException e) {
+            throw new HasException("Failed to add princial, " + e.getMessage());
+        }
+    }
+
+    public String getHttpPrincipal() throws HasException {
+        String realm = kdcServer.getKdcSetting().getKdcRealm();
+        String nameString;
+        try {
+            InetAddress addr = InetAddress.getLocalHost();
+            String fqName = addr.getCanonicalHostName();
+            nameString = "HTTP/" + fqName + "@" + realm;
+        } catch (UnknownHostException e) {
+            throw new HasException(e);
+        }
+        LOG.info("The http principal name is: " + nameString);
+        return nameString;
+    }
+
+    /**
+     * Update conf file.
+     *
+     * @param confName  conf file name
+     * @param values    customized values
+     * @throws IOException e
+     * @throws HasException e
+     */
+    public void updateConfFile(String confName, Map<String, String> values)
+        throws IOException, HasException {
+        File confFile = new File(getConfDir().getAbsolutePath(), confName);
+        if (confFile.exists()) {
+            // Update conf file content
+            InputStream templateResource;
+            if (confName.equals("has-server.conf")) {
+                templateResource = new FileInputStream(confFile);
+            } else {
+                String resourcePath = "/" + confName + ".template";
+                templateResource = getClass().getResourceAsStream(resourcePath);
+            }
+            String content = IOUtil.readInput(templateResource);
+            for (Map.Entry<String, String> entry : values.entrySet()) {
+                content = content.replaceAll(Pattern.quote(entry.getKey()), entry.getValue());
+            }
+
+            // Delete the original conf file
+            boolean delete = confFile.delete();
+            if (!delete) {
+                throw new HasException("Failed to delete conf file: " + confName);
+            }
+
+            // Save the updated conf file
+            IOUtil.writeFile(content, confFile);
+        } else {
+            throw new HasException("Conf file: " + confName + " not found.");
+        }
+    }
+
+    /**
+     * Get KDC Config from MySQL.
+     *
+     * @return Kdc config
+     * @throws HasException e
+     */
+    private Map<String, String> getKdcConf() throws HasException {
+        PreparedStatement preStm = null;
+        ResultSet result = null;
+        Map<String, String> kdcConf = new HashMap<>();
+        BackendConfig backendConfig;
+        try {
+            backendConfig = KdcUtil.getBackendConfig(getConfDir());
+        } catch (KrbException e) {
+            throw new HasException("Getting backend config failed." + e.getMessage());
+        }
+        String driver = backendConfig.getString("mysql_driver");
+        String url = backendConfig.getString("mysql_url");
+        String user = backendConfig.getString("mysql_user");
+        String password = backendConfig.getString("mysql_password");
+        Connection connection = startConnection(driver, url, user, password);
+        try {
+
+            // Get Kdc configuration from kdc_config table
+            String stmKdc = "SELECT * FROM `kdc_config` WHERE id = 1";
+            preStm = connection.prepareStatement(stmKdc);
+            result = preStm.executeQuery();
+            while (result.next()) {
+                String realm = result.getString("realm");
+                String servers = result.getString("servers");
+                String port = String.valueOf(result.getInt("port"));
+                kdcConf.put("servers", servers);
+                kdcConf.put("_PORT_", port);
+                kdcConf.put("_REALM_", realm);
+            }
+
+        } catch (SQLException e) {
+            LOG.error("Error occurred while getting kdc config.");
+            throw new HasException("Failed to get kdc config. ", e);
+        } finally {
+            DbUtils.closeQuietly(preStm);
+            DbUtils.closeQuietly(result);
+            DbUtils.closeQuietly(connection);
+        }
+
+        return kdcConf;
+    }
+
+    /**
+     * Update KDC conf file.
+     *
+     * @throws HasException e
+     */
+    private void updateKdcConf() throws HasException {
+        try {
+            Map<String, String> values = getKdcConf();
+            String host = getKdcHost();
+            if (host == null) {
+                host = getWebServer().getBindAddress().getHostName();
+            }
+            values.remove("servers");
+            values.put("_HOST_", host);
+            updateConfFile("kdc.conf", values);
+        } catch (IOException e) {
+            throw new HasException("Failed to update kdc config. ", e);
+        }
+    }
+
+    /**
+     * Start the MySQL connection.
+     *
+     * @param driver JDBC driver class name
+     * @param url url of connection
+     * @param user username of connection
+     * @param password password of connection
+     * @throws HasException e
+     * @return MySQL JDBC connection
+     */
+    private Connection startConnection(String driver, String url, String user,
+                                       String password) throws HasException {
+        Connection connection;
+        try {
+            Class.forName(driver);
+            connection = DriverManager.getConnection(url, user, password);
+            if (!connection.isClosed()) {
+                LOG.info("Succeeded in connecting to MySQL.");
+            }
+        } catch (ClassNotFoundException e) {
+            throw new HasException("JDBC Driver Class not found. ", e);
+        } catch (SQLException e) {
+            throw new HasException("Failed to connecting to MySQL. ", e);
+        }
+
+        return connection;
+    }
+
+    /**
+     * Configure the HAS server KDC that uses the MySQL backend.
+     * @param backendConfig MySQL backend config
+     * @param realm KDC realm to set
+     * @param port KDC port to set
+     * @param host KDC host to set
+     * @param hasServer HAS server instance to update
+     * @throws HasException e
+     */
+    public void configMySQLKdc(BackendConfig backendConfig, String realm, int port,
+                               String host, HasServer hasServer) throws HasException {
+
+        // Start mysql connection
+        String driver = backendConfig.getString("mysql_driver");
+        String url = backendConfig.getString("mysql_url");
+        String user = backendConfig.getString("mysql_user");
+        String password = backendConfig.getString("mysql_password");
+        Connection connection = startConnection(driver, url, user, password);
+
+        ResultSet resConfig = null;
+        PreparedStatement preStm = null;
+        try {
+            createKdcTable(connection); // Create kdc_config table if not exists
+            String stm = "SELECT * FROM `kdc_config` WHERE id = 1";
+            preStm = connection.prepareStatement(stm);
+            resConfig = preStm.executeQuery();
+            if (!resConfig.next()) {
+                addKdcConfig(connection, realm, port, host);
+            } else {
+                String oldHost = hasServer.getKdcHost();
+                String servers = resConfig.getString("servers");
+                String[] serverArray = servers.split(",");
+                List<String> serverList = new ArrayList<>();
+                Collections.addAll(serverList, serverArray);
+                if (serverList.contains(oldHost)) {
+                    servers = servers.replaceAll(Pattern.quote(oldHost), host);
+                } else {
+                    servers = servers + "," + host;
+                }
+                boolean initialized = resConfig.getBoolean("initialized");
+                updateKdcConfig(connection, initialized, port, realm, servers);
+            }
+            hasServer.setKdcHost(host);
+        } catch (SQLException e) {
+            throw new HasException("Failed to config HAS KDC. ", e);
+        } finally {
+            DbUtils.closeQuietly(preStm);
+            DbUtils.closeQuietly(resConfig);
+            DbUtils.closeQuietly(connection);
+        }
+    }
+
+    /**
+     * Create kdc_config table in database.
+     * @param conn database connection
+     * @throws HasException e
+     */
+    private void createKdcTable(final Connection conn) throws HasException {
+        PreparedStatement preStm = null;
+        try {
+            String stm = "CREATE TABLE IF NOT EXISTS `kdc_config` ("
+                + "port INTEGER DEFAULT 88, servers VARCHAR(255) NOT NULL, "
+                + "initialized bool DEFAULT FALSE, realm VARCHAR(255) "
+                + "DEFAULT NULL, id INTEGER DEFAULT 1, CHECK (id=1), PRIMARY KEY (id)) "
+                + "ENGINE=INNODB;";
+            preStm = conn.prepareStatement(stm);
+            preStm.executeUpdate();
+        } catch (SQLException e) {
+            throw new HasException("Failed to create kdc_config table. ", e);
+        } finally {
+            DbUtils.closeQuietly(preStm);
+        }
+    }
+
+    /**
+     * Add KDC Config information in database.
+     * @param conn database connection
+     * @param realm realm to add
+     * @param port port to add
+     * @param host host to add
+     */
+    private void addKdcConfig(Connection conn, String realm, int port, String host)
+        throws HasException {
+        PreparedStatement preStm = null;
+        try {
+            String stm = "INSERT INTO `kdc_config` (port, servers, realm)" + " VALUES(?, ?, ?)";
+            preStm = conn.prepareStatement(stm);
+            preStm.setInt(1, port);
+            preStm.setString(2, host);
+            preStm.setString(3, realm);
+            preStm.executeUpdate();
+        } catch (SQLException e) {
+            throw new HasException("Failed to insert into kdc_config table. ", e);
+        } finally {
+            DbUtils.closeQuietly(preStm);
+        }
+    }
+
+    /**
+     * Update KDC Config record in database.
+     * @param conn database connection
+     * @param realm realm to update
+     * @param port port to update
+     * @param servers servers to update
+     * @param initialized initial state of KDC Config
+     */
+    private void updateKdcConfig(Connection conn, boolean initialized, int port,
+                                 String realm, String servers) throws HasException {
+        PreparedStatement preStm = null;
+        try {
+            if (initialized) {
+                String stmUpdate = "UPDATE `kdc_config` SET servers = ? WHERE id = 1";
+                preStm = conn.prepareStatement(stmUpdate);
+                preStm.setString(1, servers);
+                preStm.executeUpdate();
+            } else {
+                String stmUpdate = "UPDATE `kdc_config` SET port = ?, realm = ?, servers = ? WHERE id = 1";
+                preStm = conn.prepareStatement(stmUpdate);
+                preStm.setInt(1, port);
+                preStm.setString(2, realm);
+                preStm.setString(3, servers);
+                preStm.executeUpdate();
+            }
+        } catch (SQLException e) {
+            throw new HasException("Failed to update KDC Config. ", e);
+        } finally {
+            DbUtils.closeQuietly(preStm);
+        }
+    }
+
+    /**
+     * Read in the krb5.conf template and substitute the correct realm, port and KDC list.
+     *
+     * @return krb5 conf file
+     * @throws HasException e
+     */
+    public File generateKrb5Conf() throws HasException {
+        Map<String, String> kdcConf = getKdcConf();
+        String[] servers = kdcConf.get("servers").split(",");
+        int kdcPort = Integer.parseInt(kdcConf.get("_PORT_"));
+        String kdcRealm = kdcConf.get("_REALM_");
+        StringBuilder kdcBuilder = new StringBuilder();
+        for (String server : servers) {
+            String append = "\t\tkdc = " + server.trim() + ":" + kdcPort + "\n";
+            kdcBuilder.append(append);
+        }
+        String kdc = kdcBuilder.toString();
+        kdc = kdc.substring(0, kdc.length() - 1);
+        String resourcePath = "/krb5.conf.template";
+        InputStream templateResource = getClass().getResourceAsStream(resourcePath);
+        String content = null;
+        try {
+            content = IOUtil.readInput(templateResource);
+        } catch (IOException e) {
+            throw new HasException("Read template resource failed. " + e);
+        }
+        content = content.replaceAll("_REALM_", kdcRealm);
+        content = content.replaceAll("_PORT_", String.valueOf(kdcPort));
+        content = content.replaceAll("_UDP_LIMIT_", "4096");
+        content = content.replaceAll("_KDCS_", kdc);
+        File confFile = new File(confDir, "krb5.conf");
+        if (confFile.exists()) {
+            boolean delete = confFile.delete();
+            if (!delete) {
+                throw new HasException("File delete error!");
+            }
+        }
+        try {
+            IOUtil.writeFile(content, confFile);
+        } catch (IOException e) {
+            throw new HasException("Write content to conf file failed. " + e);
+        }
+
+        return confFile;
+    }
+
+    /**
+     * Read in has-server.conf and create has-client.conf.
+     *
+     * @return has conf file
+     * @throws IOException e
+     * @throws HasException e
+     */
+    public File generateHasConf() throws HasException, IOException {
+        Map<String, String> kdcConf = getKdcConf();
+        String servers = kdcConf.get("servers");
+        File confFile = new File(getConfDir().getAbsolutePath(), "has-server.conf");
+        HasConfig hasConfig = HasUtil.getHasConfig(confFile);
+        if (hasConfig != null) {
+            String defaultValue = hasConfig.getHttpsHost();
+            InputStream templateResource = new FileInputStream(confFile);
+            String content = IOUtil.readInput(templateResource);
+            content = content.replaceFirst(Pattern.quote(defaultValue), servers);
+            File hasFile = new File(confDir, "has-client.conf");
+            IOUtil.writeFile(content, hasFile);
+            return hasFile;
+        } else {
+            throw new HasException("has-server.conf not found. ");
+        }
+    }
+
+    public void stopKdcServer() {
+        try {
+            kdcServer.stop();
+        } catch (KrbException e) {
+            LOG.error("Fail to stop has kdc server");
+        }
+    }
+
+    public void startWebServer() throws HasException {
+        if (webServer == null) {
+            HasConfig conf = new HasConfig();
+
+            // Parse has-server.conf to get http_host and http_port
+            File confFile = new File(confDir, "has-server.conf");
+            hasConfig = HasUtil.getHasConfig(confFile);
+            if (hasConfig != null) {
+                try {
+                    String httpHost;
+                    String httpPort;
+                    String httpsHost;
+                    String httpsPort;
+                    if (hasConfig.getHttpHost() != null) {
+                        httpHost = hasConfig.getHttpHost();
+                    } else {
+                        LOG.info("Cannot get the http_host from has-server.conf, using the default http host.");
+                        httpHost = WebConfigKey.HAS_HTTP_HOST_DEFAULT;
+                    }
+                    if (hasConfig.getHttpPort() != null) {
+                        httpPort = hasConfig.getHttpPort();
+                    } else {
+                        LOG.info("Cannot get the http_port from has-server.conf, using the default http port.");
+                        httpPort = String.valueOf(WebConfigKey.HAS_HTTP_PORT_DEFAULT);
+                    }
+                    if (hasConfig.getHttpsHost() != null) {
+                        httpsHost = hasConfig.getHttpsHost();
+                    } else {
+                        LOG.info("Cannot get the https_host from has-server.conf, using the default https host.");
+                        httpsHost = WebConfigKey.HAS_HTTPS_HOST_DEFAULT;
+                    }
+                    if (hasConfig.getHttpsPort() != null) {
+                        httpsPort = hasConfig.getHttpsPort();
+                    } else {
+                        LOG.info("Cannot get the https_port from has-server.conf , using the default https port.");
+                        httpsPort = String.valueOf(WebConfigKey.HAS_HTTPS_PORT_DEFAULT);
+                    }
+                    String hasHttpAddress = httpHost + ":" + httpPort;
+                    String hasHttpsAddress = httpsHost + ":" + httpsPort;
+                    LOG.info("The web server http address: " + hasHttpAddress);
+                    LOG.info("The web server https address: " + hasHttpsAddress);
+
+                    conf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY, hasHttpAddress);
+                    conf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY, hasHttpsAddress);
+                    conf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY,
+                        HttpConfig.Policy.HTTP_AND_HTTPS.name());
+                    conf.setString(WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+                        hasConfig.getSslServerConf());
+                    webServer = new WebServer(conf);
+                } catch (NumberFormatException e) {
+                    throw new IllegalArgumentException("https_port should be a number. "
+                        + e.getMessage());
+                }
+            } else {
+                throw new HasException("has-server.conf not found in " + confDir + ". ");
+            }
+        } else {
+            hasConfig = webServer.getConf();
+        }
+        webServer.start();
+        webServer.defineConfFilter();
+        try {
+            HasUtil.setEnableConf(new File(confDir, "has-server.conf"), "true");
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when enable conf. " + e.getMessage());
+        }
+        webServer.setWebServerAttribute(this);
+    }
+
+    public void stopWebServer() {
+        if (webServer != null) {
+            try {
+                webServer.stop();
+            } catch (Exception e) {
+                LOG.error("Failed to stop http server. " + e.getMessage());
+            }
+        }
+    }
+
+    public static void main(String[] args) {
+        if (args.length >= 3 && args[0].equals("-start")) {
+            String confDirPath = args[1];
+            String workDirPath = args[2];
+            File confDir = new File(confDirPath);
+            File workDir = new File(workDirPath);
+            if (!confDir.exists() || !workDir.exists()) {
+                LOG.error("Invalid or not exist conf-dir or work-dir");
+                System.exit(3);
+            }
+            try {
+                server = new HasServer(confDir);
+            } catch (KrbException e) {
+                LOG.error("Errors occurred when create kdc server:  " + e.getMessage());
+                System.exit(4);
+            }
+            server.setConfDir(confDir);
+            server.setWorkDir(workDir);
+            // Only start the web server; the KDC server can start after the realm is set.
+            try {
+                server.startWebServer();
+            } catch (HasException e) {
+                LOG.error("Errors occurred when start has http server:  " + e.getMessage());
+                System.exit(6);
+            }
+
+            if (server.getWebServer().getHttpAddress() != null) {
+                LOG.info("HAS http server started.");
+                LOG.info("host: " + server.getWebServer().getHttpAddress().getHostName());
+                LOG.info("port: " + server.getWebServer().getHttpAddress().getPort());
+            }
+            if (server.getWebServer().getHttpsAddress() != null) {
+                LOG.info("HAS https server started.");
+                LOG.info("host: " + server.getWebServer().getHttpsAddress().getHostName());
+                LOG.info("port: " + server.getWebServer().getHttpsAddress().getPort());
+            }
+        } else if (args[0].equals("-stop")) {
+            if (server != null) {
+                server.stopWebServer();
+                server.stopKdcServer();
+            }
+        } else {
+            System.exit(2);
+        }
+    }
+}
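
A usage note (paths are illustrative assumptions): the entry point expects "-start"
followed by the conf and work directories, and exits non-zero on failure, e.g.

    // Hypothetical launch; the directory layout is an assumption.
    HasServer.main(new String[]{"-start", "/etc/has/conf", "/var/has/work"});

Since the server reference is held in a static field, "-stop" only has effect when
invoked inside the same JVM that started the server.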

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java
new file mode 100644
index 0000000..6650308
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server;
+
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+
+public interface HasServerPlugin {
+    /**
+     * Get the login module type ID, used to distinguish this module from others.
+     * Should correspond to the client-side module.
+     *
+     * @return login type
+     */
+    String getLoginType();
+
+    /**
+     * Perform all the server-side authentication logic; the result, wrapped in an
+     * AuthToken, will be used to exchange for a Kerberos ticket.
+     *
+     * @param userToken user token
+     * @return auth token
+     * @throws HasAuthenException on authentication failure
+     */
+    AuthToken authenticate(AuthToken userToken) throws HasAuthenException;
+}
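
For orientation, a minimal implementation sketch (the package, class name and
"EXAMPLE" login type are illustrative, and a String-arg HasAuthenException
constructor is assumed):

    package org.example;

    import org.apache.hadoop.has.server.HasAuthenException;
    import org.apache.hadoop.has.server.HasServerPlugin;
    import org.apache.kerby.kerberos.kerb.type.base.AuthToken;

    public class ExampleServerPlugin implements HasServerPlugin {
        @Override
        public String getLoginType() {
            return "EXAMPLE";
        }

        @Override
        public AuthToken authenticate(AuthToken userToken) throws HasAuthenException {
            // Validate the client-supplied token; on success, return a token whose
            // attributes the KDC handler can exchange for a Kerberos ticket.
            if (userToken == null || userToken.getSubject() == null) {
                throw new HasAuthenException("Invalid user token.");
            }
            return userToken;
        }
    }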

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java
new file mode 100644
index 0000000..621b321
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server;
+
+import org.apache.hadoop.has.common.HasException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.ServiceLoader;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class HasServerPluginRegistry {
+    static final Logger LOG = LoggerFactory.getLogger(HasServerPluginRegistry.class);
+
+    private static Map<String, Class> allPlugins = new ConcurrentHashMap<>();
+
+    static {
+        ServiceLoader<HasServerPlugin> plugins = ServiceLoader.load(HasServerPlugin.class);
+
+        for (HasServerPlugin plugin : plugins) {
+            allPlugins.put(plugin.getLoginType(), plugin.getClass());
+        }
+    }
+
+    public static Set<String> registeredPlugins() {
+        return Collections.unmodifiableSet(allPlugins.keySet());
+    }
+
+    public static boolean registeredPlugin(String name) {
+        return allPlugins.containsKey(name);
+    }
+
+    public static HasServerPlugin createPlugin(String name) throws HasException {
+        if (!registeredPlugin(name)) {
+            throw new HasException("Unregistered plugin " + name);
+        }
+        try {
+            return (HasServerPlugin) allPlugins.get(name).newInstance();
+        } catch (Exception e) {
+            LOG.error("Failed to create {} plugin", name, e);
+            throw new HasException(e.getMessage());
+        }
+        }
+    }
+}
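
Since discovery goes through java.util.ServiceLoader, registering a plugin only
requires a provider-configuration file on the classpath; a sketch using the
hypothetical plugin above (exception handling elided):

    // File: META-INF/services/org.apache.hadoop.has.server.HasServerPlugin
    //   org.example.ExampleServerPlugin

    if (HasServerPluginRegistry.registeredPlugin("EXAMPLE")) {
        HasServerPlugin plugin = HasServerPluginRegistry.createPlugin("EXAMPLE");
        // plugin.authenticate(token) can now back the REST token endpoint.
    }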


http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HadminApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HadminApi.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HadminApi.java
new file mode 100644
index 0000000..1b84639
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HadminApi.java
@@ -0,0 +1,455 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web.rest;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.server.HasServer;
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+import org.apache.hadoop.has.server.web.HostRoleType;
+import org.apache.hadoop.has.server.web.WebServer;
+import org.apache.hadoop.has.server.web.rest.param.HostParam;
+import org.apache.hadoop.has.server.web.rest.param.HostRoleParam;
+import org.apache.hadoop.has.server.web.rest.param.PasswordParam;
+import org.apache.hadoop.has.server.web.rest.param.PrincipalParam;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+/**
+ * HAS HasAdmin web methods implementation.
+ */
+@Path("/admin")
+public class HadminApi {
+
+    @Context
+    private ServletContext context;
+
+    @Context
+    private HttpServletRequest httpRequest;
+
+    private void compressFile(File file, ZipOutputStream out, String basedir) {
+        if (!file.exists()) {
+            return;
+        }
+        try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file))) {
+            ZipEntry entry = new ZipEntry(basedir + file.getName());
+            out.putNextEntry(entry);
+            int count;
+            byte[] data = new byte[8192];
+            while ((count = bis.read(data, 0, 8192)) != -1) {
+                out.write(data, 0, count);
+            }
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * @param host Hadoop node
+     * @param role Hadoop role
+     * @return Response
+     */
+    @GET
+    @Path("/exportkeytabs")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response exportKeytabs(@QueryParam(HostParam.NAME) @DefaultValue(HostParam.DEFAULT)
+                                  final HostParam host,
+                                  @QueryParam(HostRoleParam.NAME) @DefaultValue(HostRoleParam.DEFAULT)
+                                  final HostRoleParam role) {
+        if (httpRequest.isSecure()) {
+            WebServer.LOG.info("Request to export keytabs.");
+            LocalHasAdmin hasAdmin = null;
+            HasServer hasServer = null;
+            try {
+                hasServer = WebServer.getHasServerFromContext(context);
+                hasAdmin = new LocalHasAdmin(hasServer);
+            } catch (KrbException e) {
+                WebServer.LOG.error("Failed to create local hadmin. " + e.getMessage());
+                return Response.serverError().build();
+            }
+            if (host.getValue() != null) {
+                if (role.getValue() != null) {
+                    try {
+                        File file = hasAdmin.getKeytabByHostAndRole(host.getValue(), role.getValue());
+                        WebServer.LOG.info("Create keytab file for the " + role.getValue()
+                            + " for " + host.getValue());
+                        return Response.ok(file).header("Content-Disposition",
+                            "attachment; filename=" + role.getValue() + "-"
+                                + host.getValue() + ".keytab").build();
+                    } catch (HasException e) {
+                        WebServer.LOG.error("Failed to export keytab File because : " + e.getMessage());
+                    }
+                } else {
+                    //export keytabs zip file
+                    List<File> keytabs = new ArrayList<>();
+                    for (HostRoleType r : HostRoleType.values()) {
+                        try {
+                            keytabs.add(hasAdmin.getKeytabByHostAndRole(host.getValue(), r.getName()));
+                            WebServer.LOG.info("Create keytab file for the " + r.getName()
+                                + " for " + host.getValue());
+                        } catch (HasException e) {
+                            WebServer.LOG.info("Failed to export keytab File because : " + e.getMessage());
+                        }
+                    }
+                    if (keytabs.size() < 1) {
+                        return Response.serverError().build();
+                    }
+                    File path = new File(hasServer.getWorkDir(), "tmp/zip/"
+                        + System.currentTimeMillis());
+                    path.mkdirs();
+                    File keytabZip = new File(path, "keytab.zip");
+                    if (keytabZip.exists()) {
+                        keytabZip.delete();
+                    }
+                    try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(keytabZip))) {
+                        for (File keytab : keytabs) {
+                            compressFile(keytab, zos, "");
+                        }
+                    } catch (Exception e) {
+                        WebServer.LOG.error("Failed to create keytab.zip: " + e.getMessage());
+                        return Response.serverError().build();
+                    }
+                    WebServer.LOG.info("Succeeded in creating keytab.zip.");
+                    return Response.ok(keytabZip).header("Content-Disposition",
+                        "attachment; filename=keytab.zip").build();
+                }
+            }
+            return Response.serverError().build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Export a single keytab file.
+     *
+     * @param principal principal name whose keytab to export
+     * @return Response
+     */
+    @GET
+    @Path("/exportkeytab")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response exportKeytab(@QueryParam("principal") final String principal) {
+        if (httpRequest.isSecure()) {
+            LocalHasAdmin hasAdmin = null;
+            WebServer.LOG.info("Exporting keytab file for " + principal + "...");
+            try {
+                HasServer hasServer = WebServer.getHasServerFromContext(context);
+                hasAdmin = new LocalHasAdmin(hasServer);
+            } catch (KrbException e) {
+                WebServer.LOG.error("Failed to create local hadmin. " + e.getMessage());
+                return Response.serverError().build();
+            }
+            if (principal != null) {
+                try {
+                    File path = new File("/tmp/" + System.currentTimeMillis());
+                    if (path.mkdirs()) {
+                        File keytabFile = new File(path, principal + ".keytab");
+                        hasAdmin.exportKeytab(keytabFile, principal);
+                        WebServer.LOG.info("Created keytab file for " + principal + " successfully.");
+                        return Response.ok(keytabFile).header("Content-Disposition", "attachment; filename="
+                            + keytabFile.getName()).build();
+                    }
+                } catch (HasException e) {
+                    WebServer.LOG.error("Failed to export keytab. " + e.toString());
+                    return Response.serverError().build();
+                }
+            }
+            return Response.serverError().build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    @PUT
+    @Path("/setconf")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response setConf(@QueryParam("isEnable") String isEnable) {
+        if (httpRequest.isSecure()) {
+            WebServer.LOG.info("Request to admin/setconf.");
+            final HasServer hasServer = WebServer.getHasServerFromContext(
+                context);
+            File hasConf = new File(hasServer.getConfDir(), "has-server.conf");
+            if (!hasConf.exists()) {
+                WebServer.LOG.error("has-server.conf is not exists.");
+                return Response.serverError().entity("has-server.conf is not exists.")
+                    .build();
+            }
+            String result = "";
+            if (isEnable.equals("true")) {
+                result = "enable";
+            } else if (isEnable.equals("false")) {
+                result = "disable";
+            } else {
+                WebServer.LOG.error("Value of isEnable is error.");
+                return Response.serverError().entity("Value of isEnable is error.")
+                    .build();
+            }
+            try {
+                HasUtil.setEnableConf(hasConf, isEnable);
+            } catch (Exception e) {
+                WebServer.LOG.error(e.getMessage());
+                return Response.serverError().entity(e.getMessage()).build();
+            }
+            return Response.ok("Set conf to " + result).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    @GET
+    @Path("/getprincipals")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response getprincipals(@QueryParam("exp") String exp) {
+        if (httpRequest.isSecure()) {
+            WebServer.LOG.info("Request to get principals.");
+            JSONObject result = new JSONObject();
+            String msg;
+            LocalHasAdmin hasAdmin = null;
+            try {
+                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
+            } catch (KrbException e) {
+                WebServer.LOG.error("Failed to create local hadmin. " + e.getMessage());
+                return Response.serverError().build();
+            }
+            try {
+                JSONArray principals = new JSONArray();
+                List<String> princList = hasAdmin.getPrincipals(exp);
+                for (String princ : princList) {
+                    principals.put(princ);
+                }
+                WebServer.LOG.info("Success to get principals with JSON.");
+                result.put("result", "success");
+                result.put("msg", principals.toString());
+                return Response.ok(result.toString()).build();
+            } catch (Exception e) {
+                WebServer.LOG.error("Failed to get principals,because : " + e.getMessage());
+                msg = "Failed to get principals,because : " + e.getMessage();
+            }
+            try {
+                result.put("result", "error");
+                result.put("msg", msg);
+            } catch (JSONException e) {
+                WebServer.LOG.error(e.getMessage());
+            }
+            return Response.ok(result.toString()).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Add principal by name and password.
+     *
+     * @param principal principal name.
+     * @param password  principal password
+     * @return Response
+     */
+    @POST
+    @Path("/addprincipal")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response addprincipal(@QueryParam(PrincipalParam.NAME) @DefaultValue(PrincipalParam.DEFAULT)
+                                 final PrincipalParam principal,
+                                 @QueryParam(PasswordParam.NAME) @DefaultValue(PasswordParam.DEFAULT)
+                                 final PasswordParam password) {
+        if (httpRequest.isSecure()) {
+            WebServer.LOG.info("Request to add the principal named " + principal.getValue());
+            LocalHasAdmin hasAdmin = null;
+            try {
+                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
+            } catch (KrbException e) {
+                WebServer.LOG.error("Failed to create local hadmin. " + e.getMessage());
+                return Response.serverError().build();
+            }
+            JSONObject result = new JSONObject();
+            String msg = "Add principal successfully.";
+            try {
+                hasAdmin.addPrincipal(principal.getValue(), password.getValue());
+                result.put("result", "success");
+                result.put("msg", msg);
+                return Response.ok(result.toString()).build();
+            } catch (Exception e) {
+                WebServer.LOG.error("Failed to add " + principal + " principal, because: " + e.getMessage());
+                msg = "Failed to add " + principal + " principal, because: " + e.getMessage();
+            }
+            try {
+                result.put("result", "error");
+                result.put("msg", msg);
+            } catch (JSONException e) {
+                WebServer.LOG.error(e.getMessage());
+            }
+            return Response.ok(result.toString()).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    @POST
+    @Path("/renameprincipal")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response renamePrincipal(@QueryParam("oldprincipal") String oldPrincipal,
+                                    @QueryParam("newprincipal") String newPrincipal) {
+        if (httpRequest.isSecure()) {
+            WebServer.LOG.info("Request to rename " + oldPrincipal + " to " + newPrincipal);
+            JSONObject result = new JSONObject();
+            String msg = "Rename principal successfully.";
+            if (oldPrincipal != null && newPrincipal != null) {
+                LocalHasAdmin hasAdmin = null;
+                try {
+                    hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
+                } catch (KrbException e) {
+                    WebServer.LOG.error("Failed to create local hadmin. " + e.getMessage());
+                    return Response.serverError().build();
+                }
+                try {
+                    hasAdmin.renamePrincipal(oldPrincipal, newPrincipal);
+                    result.put("result", "success");
+                    result.put("msg", msg);
+                    return Response.ok(result.toString()).build();
+                } catch (Exception e) {
+                    WebServer.LOG.error("Failed to rename principal " + oldPrincipal + " to "
+                        + newPrincipal + ",because: " + e.getMessage());
+                    msg = "Failed to rename principal " + oldPrincipal + " to "
+                        + newPrincipal + ",because: " + e.getMessage();
+                }
+            } else {
+                WebServer.LOG.error("Value of old or new principal is null.");
+                msg = "Value of old or new principal is null.";
+            }
+            try {
+                result.put("result", "error");
+                result.put("msg", msg);
+            } catch (JSONException e) {
+                WebServer.LOG.error(e.getMessage());
+            }
+            return Response.ok(result.toString()).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Delete principal by name.
+     *
+     * @param principal principal like "admin" or "admin@HADOOP.COM".
+     * @return Response
+     */
+    @DELETE
+    @Path("/deleteprincipal")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response deleteprincipal(@QueryParam(PrincipalParam.NAME) @DefaultValue(PrincipalParam.DEFAULT)
+                                    final PrincipalParam principal) {
+        if (httpRequest.isSecure()) {
+            WebServer.LOG.info("Request to delete the principal named " + principal.getValue());
+            JSONObject result = new JSONObject();
+            String msg = "Delete principal successfully.";
+            LocalHasAdmin hasAdmin = null;
+            try {
+                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
+            } catch (KrbException e) {
+                WebServer.LOG.error("Failed to create local hadmin. " + e.getMessage());
+                return Response.serverError().build();
+            }
+            try {
+                hasAdmin.deletePrincipal(principal.getValue());
+                result.put("result", "success");
+                result.put("msg", msg);
+                return Response.ok(result.toString()).build();
+            } catch (Exception e) {
+                WebServer.LOG.error("Failed to delete the principal named " + principal.getValue()
+                    + ",because : " + e.getMessage());
+                msg = "Failed to delete the principal named " + principal.getValue()
+                    + ",because : " + e.getMessage();
+            }
+            try {
+                result.put("result", "error");
+                result.put("msg", msg);
+            } catch (JSONException e) {
+                WebServer.LOG.error(e.getMessage());
+            }
+            return Response.ok(result.toString()).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    @PUT
+    @Path("/createprincipals")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response createprincipals(@Context HttpServletRequest request) {
+        if (httpRequest.isSecure()) {
+            LocalHasAdmin hasAdmin = null;
+            try {
+                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
+            } catch (KrbException e) {
+                WebServer.LOG.error("Failed to create local hadmin. " + e.getMessage());
+                return Response.serverError().build();
+            }
+            JSONObject result = new JSONObject();
+            String msg = "";
+            try {
+                StringBuilder data = new StringBuilder();
+                BufferedReader br = new BufferedReader(new InputStreamReader(request.getInputStream()));
+                String s;
+                while ((s = br.readLine()) != null) {
+                    data.append(s);
+                }
+                WebServer.LOG.info("Request to create principals by JSON : \n" + data.toString());
+                JSONArray hostArray = new JSONObject(data.toString()).optJSONArray("HOSTS");
+                for (int i = 0; i < hostArray.length(); i++) {
+                    JSONObject host = (JSONObject) hostArray.get(i);
+                    String[] roles = host.getString("hostRoles").split(",");
+                    for (String role : roles) {
+                        msg += hasAdmin.addPrincByRole(host.getString("name"), role.toUpperCase());
+                    }
+                }
+                result.put("result", "success");
+                result.put("msg", msg);
+                return Response.ok(result.toString()).build();
+            } catch (Exception e) {
+                WebServer.LOG.error("Failed to create principals,because : " + e.getMessage());
+                msg = "Failed to create principals,because : " + e.getMessage();
+            }
+            try {
+                result.put("result", "error");
+                result.put("msg", msg);
+            } catch (JSONException e) {
+                WebServer.LOG.error(e.getMessage());
+            }
+            return Response.ok(result.toString()).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+}
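
For reference, createprincipals parses a JSON body of the following shape (host
names and roles are illustrative); each listed role is expanded to principals via
LocalHasAdmin.addPrincByRole, and every admin endpoint requires HTTPS:

    {
      "HOSTS": [
        {"name": "host1", "hostRoles": "HDFS,YARN"},
        {"name": "host2", "hostRoles": "HBASE"}
      ]
    }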

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HasApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HasApi.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HasApi.java
new file mode 100644
index 0000000..a1eb958
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HasApi.java
@@ -0,0 +1,336 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web.rest;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.server.HasAuthenException;
+import org.apache.hadoop.has.server.HasServer;
+import org.apache.hadoop.has.server.HasServerPlugin;
+import org.apache.hadoop.has.server.HasServerPluginRegistry;
+import org.apache.hadoop.has.server.kdc.HasKdcHandler;
+import org.apache.hadoop.has.server.web.HostRoleType;
+import org.apache.hadoop.has.server.web.WebServer;
+import org.apache.hadoop.has.server.web.rest.param.AuthTokenParam;
+import org.apache.hadoop.has.server.web.rest.param.TypeParam;
+import org.apache.hadoop.http.JettyUtils;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.KrbRuntime;
+import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
+import org.apache.kerby.kerberos.kerb.provider.TokenDecoder;
+import org.apache.kerby.kerberos.kerb.server.KdcUtil;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * HAS web methods implementation.
+ */
+@Path("")
+public class HasApi {
+
+    @Context
+    private ServletContext context;
+
+    @Context
+    private HttpServletRequest httpRequest;
+
+    /**
+     * Get krb5.conf file.
+     *
+     * @return Response
+     */
+    @GET
+    @Path("/getkrb5conf")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response getKrb5Conf() {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            try {
+                BackendConfig backendConfig = KdcUtil.getBackendConfig(hasServer.getConfDir());
+                String backendJar = backendConfig.getString("kdc_identity_backend");
+                File conf;
+                if ("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend".equals(backendJar)) {
+                    conf = hasServer.generateKrb5Conf();
+                } else {
+                    File confDir = hasServer.getConfDir();
+                    conf = new File(confDir, "krb5.conf");
+                }
+                return Response.ok(conf).header("Content-Disposition", "attachment; filename=krb5.conf").build();
+            } catch (KrbException | HasException e) {
+                throw new RuntimeException("Failed to get Krb5.conf. ", e);
+            }
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Get has-client.conf file.
+     *
+     * @return Response
+     */
+    @GET
+    @Path("/gethasconf")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response getHasConf() {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            try {
+                BackendConfig backendConfig = KdcUtil.getBackendConfig(hasServer.getConfDir());
+                String backendJar = backendConfig.getString("kdc_identity_backend");
+                File conf;
+                if ("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend".equals(backendJar)) {
+                    conf = hasServer.generateHasConf();
+                } else {
+                    File confDir = hasServer.getConfDir();
+                    conf = new File(confDir, "has-server.conf");
+                }
+                return Response.ok(conf).header("Content-Disposition", "attachment; filename=has-client.conf").build();
+            } catch (IOException | KrbException | HasException e) {
+                throw new RuntimeException("Failed to get has-client.conf. ", e);
+            }
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Get CA file.
+     *
+     * @return Response
+     */
+    @GET
+    @Path("/getcert")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response getCert() {
+        final HasServer hasServer = WebServer.getHasServerFromContext(context);
+        String errMessage = null;
+        File cert = null;
+        try {
+            HasConfig hasConfig = HasUtil.getHasConfig(
+                new File(hasServer.getConfDir(), "has-server.conf"));
+            if (hasConfig != null) {
+                String certPath = hasConfig.getSslClientCert();
+                cert = new File(certPath);
+                if (!cert.exists()) {
+                    errMessage = "Cert file not found in HAS server.";
+                    WebServer.LOG.error("Cert file not found in HAS server.");
+                }
+            } else {
+                errMessage = "has-server.conf not found.";
+                WebServer.LOG.error("has-server.conf not found.");
+            }
+        } catch (HasException e) {
+            errMessage = "Failed to get cert file" + e.getMessage();
+            WebServer.LOG.error("Failed to get cert file" + e.getMessage());
+        }
+        if (errMessage == null) {
+            return Response.ok(cert).header("Content-Disposition",
+                "attachment;filename=" + cert.getName()).build();
+        } else {
+            return Response.status(Response.Status.NOT_FOUND).entity(errMessage).build();
+        }
+    }
+
+    @GET
+    @Path("/hostroles")
+    @Produces(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8)
+    public Response getRoles() {
+        if (httpRequest.isSecure()) {
+            JSONArray result = new JSONArray();
+            try {
+                for (HostRoleType role : HostRoleType.values()) {
+                    JSONObject jso = new JSONObject();
+                    jso.put("HostRole", role.getName());
+                    JSONArray jsa = new JSONArray();
+                    String[] princs = role.getPrincs();
+                    for (String princ : princs) {
+                        jsa.put(princ);
+                    }
+                    jso.put("PrincipalNames", jsa);
+                    result.put(jso);
+                }
+                return Response.ok(result.toString() + "\n").type(MediaType.APPLICATION_JSON).build();
+            } catch (Exception e) {
+                WebServer.LOG.error("Failed to get host roles." + e.getMessage());
+            }
+            return Response.serverError().build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    @GET
+    @Path("/kdcinit")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response kdcInit() {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            try {
+                File adminKeytab = hasServer.initKdcServer();
+                return Response.ok(adminKeytab).header("Content-Disposition",
+                    "attachment; filename=" + adminKeytab.getName()).build();
+            } catch (KrbException e) {
+                WebServer.LOG.error("Failed to init KDC server. " + e.getMessage());
+            }
+            return Response.serverError().build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    @GET
+    @Path("/kdcstart")
+    @Produces(MediaType.TEXT_PLAIN)
+    public Response kdcStart() {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            JSONObject result = new JSONObject();
+            String msg = "Succeeded in starting KDC server.";
+
+            try {
+                hasServer.startKdcServer();
+                result.put("result", "success");
+                result.put("msg", msg);
+                return Response.ok(result.toString()).build();
+            } catch (HasException e) {
+                WebServer.LOG.error("Failed to start KDC server. " + e.getMessage());
+                msg = e.getMessage();
+            } catch (JSONException e) {
+                WebServer.LOG.error(e.getMessage());
+                msg = e.getMessage();
+            }
+            try {
+                result.put("result", "error");
+                result.put("msg", msg);
+            } catch (JSONException e) {
+                WebServer.LOG.error(e.getMessage());
+            }
+            return Response.ok(result.toString()).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Handle HTTP PUT request.
+     */
+    @PUT
+    @Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8,
+        MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8})
+    public Response asRequest(
+        @QueryParam(TypeParam.NAME) @DefaultValue(TypeParam.DEFAULT)
+        final TypeParam type,
+        @QueryParam(AuthTokenParam.NAME) @DefaultValue(AuthTokenParam.DEFAULT)
+        final AuthTokenParam authToken
+    ) {
+        return asRequest(type.getValue(), authToken.getValue());
+    }
+
+    private Response asRequest(String type, String tokenStr) {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            String errMessage = null;
+            String js = null;
+            ObjectMapper mapper = new ObjectMapper();
+            final Map<String, Object> m = new TreeMap<String, Object>();
+
+            if (hasServer.getKdcServer() == null) {
+                errMessage = "Please start the HAS KDC server first.";
+            } else if (tokenStr != null && !tokenStr.isEmpty()) {
+                HasKdcHandler kdcHandler = new HasKdcHandler(hasServer);
+
+                TokenDecoder tokenDecoder = KrbRuntime.getTokenProvider("JWT").createTokenDecoder();
+
+                AuthToken authToken = null;
+                try {
+                    authToken = tokenDecoder.decodeFromString(tokenStr);
+                } catch (IOException e) {
+                    errMessage = "Failed to decode the token string." + e.getMessage();
+                    WebServer.LOG.error(errMessage);
+                }
+                HasServerPlugin tokenPlugin = null;
+                try {
+                    tokenPlugin = HasServerPluginRegistry.createPlugin(type);
+                } catch (HasException e) {
+                    errMessage = "Fail to get the plugin: " + type + ". " + e.getMessage();
+                    WebServer.LOG.error(errMessage);
+                }
+                AuthToken verifiedAuthToken = null;
+                if (tokenPlugin != null) {
+                    try {
+                        verifiedAuthToken = tokenPlugin.authenticate(authToken);
+                    } catch (HasAuthenException e) {
+                        errMessage = "Failed to verify auth token: " + e.getMessage();
+                        WebServer.LOG.error(errMessage);
+                    }
+                }
+
+                if (verifiedAuthToken != null) {
+                    KrbMessage asRep = kdcHandler.getResponse(verifiedAuthToken,
+                        (String) verifiedAuthToken.getAttributes().get("passPhrase"));
+
+                    Base64 base64 = new Base64(0);
+                    try {
+                        m.put("type", tokenPlugin.getLoginType());
+                        m.put("success", "true");
+                        m.put("krbMessage", base64.encodeToString(asRep.encode()));
+                    } catch (IOException e) {
+                        errMessage = "Failed to encode KrbMessage." + e.getMessage();
+                        WebServer.LOG.error(errMessage);
+                    }
+
+                }
+            } else {
+                errMessage = "The token string should not be empty.";
+                WebServer.LOG.error(errMessage);
+            }
+
+            if (errMessage != null) {
+                m.put("success", "false");
+                m.put("krbMessage", errMessage);
+            }
+            try {
+                js = mapper.writeValueAsString(m);
+            } catch (JsonProcessingException e) {
+                WebServer.LOG.error("Failed write values to string." + e.getMessage());
+            }
+            return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+}
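
For reference, a successful PUT token exchange yields JSON of roughly this shape
(values illustrative; krbMessage carries the Base64-encoded AS-REP):

    {"krbMessage":"<base64 AS-REP>","success":"true","type":"<plugin login type>"}

On any failure, "success" is "false" and "krbMessage" carries the error message
instead.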

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/AuthTokenParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/AuthTokenParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/AuthTokenParam.java
new file mode 100644
index 0000000..1df0312
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/AuthTokenParam.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web.rest.param;
+
+public class AuthTokenParam extends StringParam {
+  /**
+   * Parameter name.
+   */
+  public static final String NAME = "authToken";
+  /**
+   * Default parameter value.
+   */
+  public static final String DEFAULT = "";
+
+  private static final StringParam.Domain DOMAIN = new StringParam.Domain(NAME, null);
+
+  /**
+   * Constructor.
+   *
+   * @param str a string representation of the parameter value.
+   */
+  public AuthTokenParam(final String str) {
+    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/EnumParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/EnumParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/EnumParam.java
new file mode 100644
index 0000000..6852ca7
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/EnumParam.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web.rest.param;
+
+import org.apache.hadoop.util.StringUtils;
+
+import java.util.Arrays;
+
+abstract class EnumParam<E extends Enum<E>>
+    extends Param<E, EnumParam.Domain<E>> {
+  EnumParam(final Domain<E> domain, final E value) {
+    super(domain, value);
+  }
+
+  /**
+   * The domain of the parameter.
+   */
+  static final class Domain<E extends Enum<E>> extends Param.Domain<E> {
+    private final Class<E> enumClass;
+
+    Domain(String name, Class<E> enumClass) {
+      super(name);
+      this.enumClass = enumClass;
+    }
+
+    @Override
+    public String getDomain() {
+      return Arrays.asList(enumClass.getEnumConstants()).toString();
+    }
+
+    @Override
+    E parse(String str) {
+      return Enum.valueOf(enumClass, StringUtils.toUpperCase(str));
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostParam.java
new file mode 100644
index 0000000..ee66ede
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostParam.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web.rest.param;
+
+public class HostParam extends StringParam {
+  /**
+   * Parameter name.
+   */
+  public static final String NAME = "host";
+  /**
+   * Default parameter value.
+   */
+  public static final String DEFAULT = "";
+
+  private static final Domain DOMAIN = new Domain(NAME, null);
+
+  /**
+   * Constructor.
+   *
+   * @param str a string representation of the parameter value.
+   */
+  public HostParam(final String str) {
+    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostRoleParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostRoleParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostRoleParam.java
new file mode 100644
index 0000000..07e481f
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostRoleParam.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web.rest.param;
+
+public class HostRoleParam extends StringParam {
+  /**
+   * Parameter name.
+   */
+  public static final String NAME = "role";
+  /**
+   * Default parameter value.
+   */
+  public static final String DEFAULT = "";
+
+  private static final Domain DOMAIN = new Domain(NAME, null);
+
+  /**
+   * Constructor.
+   *
+   * @param str a string representation of the parameter value.
+   */
+  public HostRoleParam(final String str) {
+    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/Param.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/Param.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/Param.java
new file mode 100644
index 0000000..5e43683
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/Param.java
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web.rest.param;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
+import java.util.Arrays;
+import java.util.Comparator;
+
+/**
+ * Base class of parameters.
+ */
+public abstract class Param<T, D extends Param.Domain<T>> {
+  static final String NULL = "null";
+
+  static final Comparator<Param<?, ?>> NAME_CMP = new Comparator<Param<?, ?>>() {
+    @Override
+    public int compare(Param<?, ?> left, Param<?, ?> right) {
+      return left.getName().compareTo(right.getName());
+    }
+  };
+
+  /** Convert the parameters to a sorted String.
+   *
+   * @param separator URI parameter separator character
+   * @param parameters parameters to encode into a string
+   * @return the encoded URI string
+   */
+  public static String toSortedString(final String separator,
+                                      final Param<?, ?>... parameters) {
+    Arrays.sort(parameters, NAME_CMP);
+    final StringBuilder b = new StringBuilder();
+    try {
+      for (Param<?, ?> p : parameters) {
+        if (p.getValue() != null) {
+          b.append(separator)
+              .append(URLEncoder.encode(p.getName(), "UTF-8"))
+              .append("=")
+              .append(URLEncoder.encode(p.getValueString(), "UTF-8"));
+        }
+      }
+    } catch (UnsupportedEncodingException e) {
+      // Sane systems know about UTF-8, so this should never happen.
+      throw new RuntimeException(e);
+    }
+    return b.toString();
+  }
+
+  /** The domain of the parameter. */
+  final D domain;
+  /** The actual parameter value. */
+  final T value;
+
+  Param(final D domain, final T value) {
+    this.domain = domain;
+    this.value = value;
+  }
+
+  /** @return the parameter value. */
+  public final T getValue() {
+    return value;
+  }
+
+  /** @return the parameter value as a string */
+  public abstract String getValueString();
+
+  /** @return the parameter name. */
+  public abstract String getName();
+
+  @Override
+  public String toString() {
+    return getName() + "=" + value;
+  }
+
+  /** Base class of parameter domains. */
+  abstract static class Domain<T> {
+    /** Parameter name. */
+    final String paramName;
+
+    Domain(final String paramName) {
+      this.paramName = paramName;
+    }
+
+    /** @return the parameter name. */
+    public final String getParamName() {
+      return paramName;
+    }
+
+    /** @return a string description of the domain of the parameter. */
+    public abstract String getDomain();
+
+    /** @return the parameter value represented by the string. */
+    abstract T parse(String str);
+
+    /** Parse the given string.
+     * @param varName the parameter name, used in error messages.
+     * @param str the string to parse.
+     * @return the parameter value represented by the string.
+     */
+    public final T parse(final String varName, final String str) {
+      try {
+        return str != null && str.trim().length() > 0 ? parse(str) : null;
+      } catch (Exception e) {
+        throw new IllegalArgumentException("Failed to parse \"" + str
+            + "\" for the parameter " + varName
+            + ".  The value must be in the domain " + getDomain(), e);
+      }
+    }
+  }
+}
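
A minimal usage sketch of the Param machinery (illustrative, not part of this
patch): toSortedString() drops parameters whose value is null, sorts the rest
by name, and URL-encodes both names and values, prefixing each pair with the
separator. The ParamUsageSketch class below is hypothetical and assumes the
parameter classes added elsewhere in this patch.

    package org.apache.hadoop.has.server.web.rest.param;

    public class ParamUsageSketch {
        public static void main(String[] args) {
            // Null-valued parameters (here HostParam) are omitted entirely.
            String query = Param.toSortedString("&",
                new PasswordParam("123"),
                new PrincipalParam("admin@HADOOP.COM"),
                new HostParam(null));
            // Prints: &password=123&principal=admin%40HADOOP.COM
            System.out.println(query);
        }
    }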

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PasswordParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PasswordParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PasswordParam.java
new file mode 100644
index 0000000..045cc96
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PasswordParam.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web.rest.param;
+
+public class PasswordParam extends StringParam {
+  /**
+   * Parameter name.
+   */
+  public static final String NAME = "password";
+  /**
+   * Default parameter value.
+   */
+  public static final String DEFAULT = "";
+
+  private static final Domain DOMAIN = new Domain(NAME, null);
+
+  /**
+   * Constructor.
+   *
+   * @param str a string representation of the parameter value.
+   */
+  public PasswordParam(final String str) {
+    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PrincipalParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PrincipalParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PrincipalParam.java
new file mode 100644
index 0000000..cabca21
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PrincipalParam.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web.rest.param;
+
+public class PrincipalParam extends StringParam {
+  /**
+   * Parameter name.
+   */
+  public static final String NAME = "principal";
+  /**
+   * Default parameter value.
+   */
+  public static final String DEFAULT = "";
+
+  private static final Domain DOMAIN = new Domain(NAME, null);
+
+  /**
+   * Constructor.
+   *
+   * @param str a string representation of the parameter value.
+   */
+  public PrincipalParam(final String str) {
+    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/StringParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/StringParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/StringParam.java
new file mode 100644
index 0000000..b5eba07
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/StringParam.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web.rest.param;
+
+import java.util.regex.Pattern;
+
+/**
+ * String parameter.
+ */
+abstract class StringParam extends Param<String, StringParam.Domain> {
+  StringParam(final Domain domain, String str) {
+    super(domain, domain.parse(str));
+  }
+
+  /**
+   * @return the parameter value as a string
+   */
+  @Override
+  public String getValueString() {
+    return value;
+  }
+
+  /**
+   * The domain of the parameter.
+   */
+  static final class Domain extends Param.Domain<String> {
+    /**
+     * The pattern defining the domain; null means any string is accepted.
+     */
+    private final Pattern pattern;
+
+    Domain(final String paramName, final Pattern pattern) {
+      super(paramName);
+      this.pattern = pattern;
+    }
+
+    @Override
+    public String getDomain() {
+      return pattern == null ? "<String>" : pattern.pattern();
+    }
+
+    @Override
+    String parse(String str) {
+      if (str != null && pattern != null) {
+        if (!pattern.matcher(str).matches()) {
+          throw new IllegalArgumentException("Invalid value: \"" + str
+              + "\" does not belong to the domain " + getDomain());
+        }
+      }
+      return str;
+    }
+  }
+}
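
Every concrete parameter in this patch passes a null Pattern, so its domain
accepts any string. As a hypothetical sketch (RealmParam is not part of this
patch), a regex-restricted domain would reject non-matching values when the
parameter is constructed:

    package org.apache.hadoop.has.server.web.rest.param;

    import java.util.regex.Pattern;

    public class RealmParam extends StringParam {
        public static final String NAME = "realm";

        // Non-null pattern: Domain.parse() throws IllegalArgumentException
        // for values outside the domain.
        private static final Domain DOMAIN =
            new Domain(NAME, Pattern.compile("[A-Z][A-Z0-9.\\-]*"));

        public RealmParam(final String str) {
            super(DOMAIN, str);
        }

        @Override
        public String getName() {
            return NAME;
        }
    }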

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/TypeParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/TypeParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/TypeParam.java
new file mode 100644
index 0000000..da208a1
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/TypeParam.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web.rest.param;
+
+public class TypeParam extends StringParam {
+
+    /**
+     * Parameter name.
+     */
+    public static final String NAME = "type";
+    /**
+     * Default parameter value.
+     */
+    public static final String DEFAULT = "";
+
+    private static final Domain DOMAIN = new Domain(NAME, null);
+
+    /**
+     * Constructor.
+     *
+     * @param str a string representation of the parameter value.
+     */
+    public TypeParam(final String str) {
+        super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
+    }
+
+    @Override
+    public String getName() {
+        return NAME;
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/resources/backend.conf.template
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/resources/backend.conf.template b/has/has-server/src/main/resources/backend.conf.template
new file mode 100644
index 0000000..598be24
--- /dev/null
+++ b/has/has-server/src/main/resources/backend.conf.template
@@ -0,0 +1,21 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+kdc_identity_backend = _JAR_
+#_JSON_DIR_
+#_MYSQL_
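
The _JAR_, _JSON_DIR_ and _MYSQL_ placeholders are substituted by the kdcinit
tooling according to the chosen backend. For illustration only (key names
assumed from the Kerby JSON backend, so verify against the generated file), a
JSON-backed configuration could end up as:

    kdc_identity_backend = org.apache.kerby.kerberos.kdc.identitybackend.JsonIdentityBackend
    backend.json.dir = /tmp/has/jsonbackend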

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/resources/kdc.conf.template
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/resources/kdc.conf.template b/has/has-server/src/main/resources/kdc.conf.template
new file mode 100644
index 0000000..3800cb3
--- /dev/null
+++ b/has/has-server/src/main/resources/kdc.conf.template
@@ -0,0 +1,23 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[kdcdefaults]
+  kdc_host = _HOST_
+  kdc_udp_port = _PORT_
+  kdc_tcp_port = _PORT_
+  kdc_realm = _REALM_
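
Rendered with the sample values used by the config_kdc command later in this
patch (config_kdc localhost 88 HADOOP.COM), the template would become:

    [kdcdefaults]
      kdc_host = localhost
      kdc_udp_port = 88
      kdc_tcp_port = 88
      kdc_realm = HADOOP.COM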

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/resources/krb5.conf.template
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/resources/krb5.conf.template b/has/has-server/src/main/resources/krb5.conf.template
new file mode 100644
index 0000000..ee090c4
--- /dev/null
+++ b/has/has-server/src/main/resources/krb5.conf.template
@@ -0,0 +1,29 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[libdefaults]
+    kdc_realm = _REALM_
+    default_realm = _REALM_
+    udp_preference_limit = _UDP_LIMIT_
+    kdc_tcp_port = _PORT_
+    kdc_udp_port = _PORT_
+
+[realms]
+    _REALM_ = {
+_KDCS_
+    }
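
With the same sample values, and assuming _KDCS_ expands to one "kdc =
host:port" entry per KDC (both the entry format and the udp_preference_limit
value are illustrative), the rendered file would look like:

    [libdefaults]
        kdc_realm = HADOOP.COM
        default_realm = HADOOP.COM
        udp_preference_limit = 4096
        kdc_tcp_port = 88
        kdc_udp_port = 88

    [realms]
        HADOOP.COM = {
            kdc = localhost:88
        }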

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/TestHasWebServer.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/TestHasWebServer.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/TestHasWebServer.java
new file mode 100644
index 0000000..148909f
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/TestHasWebServer.java
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server;
+
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.util.URLConnectionFactory;
+import org.apache.hadoop.has.server.web.WebConfigKey;
+import org.apache.hadoop.has.server.web.WebServer;
+import org.apache.hadoop.http.HttpConfig.Policy;
+import org.apache.hadoop.net.NetUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.File;
+import java.net.InetSocketAddress;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.Arrays;
+import java.util.Collection;
+
+@RunWith(value = Parameterized.class)
+public class TestHasWebServer {
+  private static final String KEY_STORE_DIR = TestUtil.getTempPath("keystore");
+  private static File keyStoreDir = new File(KEY_STORE_DIR);
+  private static HasConfig httpsConf;
+  private static URLConnectionFactory connectionFactory;
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> policy() {
+    Object[][] params = new Object[][]{{Policy.HTTP_ONLY},
+        {Policy.HTTPS_ONLY}, {Policy.HTTP_AND_HTTPS}};
+    return Arrays.asList(params);
+  }
+
+  private final Policy policy;
+
+  public TestHasWebServer(Policy policy) {
+    this.policy = policy;
+  }
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    httpsConf = new HasConfig();
+    // Create test keystore dir.
+    if (!keyStoreDir.exists()) {
+      if (!keyStoreDir.mkdirs()) {
+        System.err.println("Failed to create keystore-dir.");
+        System.exit(3);
+      }
+    }
+    String sslConfDir = TestUtil.getClasspathDir(TestRestApiBase.class);
+    TestUtil.setupSSLConfig(KEY_STORE_DIR, sslConfDir, httpsConf, false);
+    connectionFactory = URLConnectionFactory.newDefaultURLConnectionFactory(httpsConf);
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    FileUtil.fullyDelete(keyStoreDir);
+  }
+
+  @Test
+  public void testHttpPolicy() throws Exception {
+    httpsConf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY, policy.name());
+    httpsConf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY, "localhost:11236");
+    httpsConf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY, "localhost:19278");
+    httpsConf.setString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE, "simple");
+
+    WebServer server = null;
+    try {
+      server = new WebServer(httpsConf);
+      server.start();
+
+      Assert.assertTrue(implies(policy.isHttpEnabled(),
+          canAccess("http", server.getHttpAddress())));
+      Assert.assertTrue(implies(!policy.isHttpEnabled(),
+          server.getHttpAddress() == null));
+
+      Assert.assertTrue(implies(policy.isHttpsEnabled(),
+          canAccess("https", server.getHttpsAddress())));
+      Assert.assertTrue(implies(!policy.isHttpsEnabled(),
+          server.getHttpsAddress() == null));
+    } finally {
+      if (server != null) {
+        server.stop();
+      }
+    }
+  }
+
+  private static boolean canAccess(String scheme, InetSocketAddress address) {
+    if (address == null) {
+      return false;
+    }
+    try {
+      URL url = new URL(scheme + "://" + NetUtils.getHostPortString(address));
+      URLConnection conn = connectionFactory.openConnection(url);
+      conn.connect();
+      conn.getContent();
+    } catch (Exception e) {
+      return false;
+    }
+    return true;
+  }
+
+  private static boolean implies(boolean a, boolean b) {
+    return !a || b;
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/TestRestApiBase.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/TestRestApiBase.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/TestRestApiBase.java
new file mode 100644
index 0000000..e95382c
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/TestRestApiBase.java
@@ -0,0 +1,336 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server;
+
+import com.sun.jersey.api.client.Client;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.api.client.config.ClientConfig;
+import com.sun.jersey.api.client.config.DefaultClientConfig;
+import com.sun.jersey.client.urlconnection.HTTPSProperties;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasConfigKey;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.spnego.AuthenticationException;
+import org.apache.hadoop.has.common.util.URLConnectionFactory;
+import org.apache.hadoop.has.server.web.WebConfigKey;
+import org.apache.hadoop.has.server.web.WebServer;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.glassfish.jersey.SslConfigurator;
+import org.junit.After;
+import org.junit.Before;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLSession;
+import javax.ws.rs.core.MultivaluedMap;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.URL;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestRestApiBase {
+    private static String address;
+    protected static File testDir = new File(System.getProperty("test.dir", "target"));
+    private static File testClassDir = new File(testDir, "test-classes");
+    private static File confDir = new File(testClassDir, "conf");
+    private static File workDir = new File(testDir, "work-dir");
+    private static HasServer server = null;
+    private static final String KEY_STORE_DIR = TestUtil.getTempPath("keystore");
+    private static File keyStoreDir = new File(KEY_STORE_DIR);
+    private static HasConfig httpsConf;
+
+    @Before
+    public void startHasServer() throws Exception {
+        // Create test keystoreDir and workDir.
+        if (!keyStoreDir.exists()) {
+            if (!keyStoreDir.mkdirs()) {
+                System.err.println("Failed to create keystore-dir.");
+                System.exit(3);
+            }
+        }
+
+        if (!workDir.exists()) {
+            if (!workDir.mkdirs()) {
+                System.err.println("Failed to create work-dir.");
+                System.exit(3);
+            }
+        }
+
+        // Configure test HAS server.
+        httpsConf = new HasConfig();
+        String sslConfDir = TestUtil.getClasspathDir(TestRestApiBase.class);
+        TestUtil.setupSSLConfig(KEY_STORE_DIR, sslConfDir, httpsConf, false);
+        httpsConf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
+        httpsConf.setString(HasConfigKey.FILTER_AUTH_TYPE, "simple");
+
+        // Start test HAS server.
+        int httpsPort = 10000 + (int) (System.currentTimeMillis() % 10000); // Derive a test port from the clock to reduce collisions
+        String host = "localhost";
+        address = host + ":" + httpsPort;
+        httpsConf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY, address);
+
+        server = new HasServer(confDir);
+        server.setWebServer(new WebServer(httpsConf));
+        server.setWorkDir(workDir);
+        try {
+            server.startWebServer();
+        } catch (HasException e) {
+            System.err.println("Errors occurred when start HAS server: " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    @After
+    public void stopHasServer() {
+        server.stopWebServer();
+        if (keyStoreDir.exists()) {
+            FileUtil.fullyDelete(keyStoreDir);
+        }
+        if (workDir.exists()) {
+            FileUtil.fullyDelete(workDir);
+        }
+    }
+
+    private void startKdc() {
+        WebResource webResource = getWebResource("kdcstart");
+        String response = webResource.get(String.class);
+        try {
+            JSONObject result = new JSONObject(response);
+            if (!result.getString("result").equals("success")) {
+                System.err.println("Errors occurred when start HAS KDC server.");
+                System.exit(6);
+            }
+        } catch (JSONException e) {
+            System.err.println("Errors occurred when start HAS KDC server. " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    protected WebResource getWebResource(String restName) {
+        String apiUrl = "https://" + address + "/has/v1/" + restName;
+        HasConfig clientConf = new HasConfig();
+        try {
+            clientConf.addIniConfig(new File(httpsConf.getString(SSLFactory.SSL_CLIENT_CONF_KEY)));
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        SslConfigurator sslConfigurator = SslConfigurator.newInstance()
+            .trustStoreFile(clientConf.getString("ssl.client.truststore.location"))
+            .trustStorePassword(clientConf.getString("ssl.client.truststore.password"));
+        sslConfigurator.securityProtocol("SSL");
+        SSLContext sslContext = sslConfigurator.createSSLContext();
+        ClientConfig clientConfig = new DefaultClientConfig();
+        clientConfig.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES,
+            new HTTPSProperties(new HostnameVerifier() {
+                @Override
+                public boolean verify(String s, SSLSession sslSession) {
+                    // Trust any hostname in tests; the self-signed test
+                    // certificate may not match the address under test.
+                    return true;
+                }
+            }, sslContext));
+        Client client = Client.create(clientConfig);
+        return client.resource(apiUrl);
+    }
+
+    protected void getKrb5Conf() {
+        WebResource webResource = getWebResource("getkrb5conf");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+    }
+
+    protected void getHasConf() {
+        WebResource webResource = getWebResource("gethasconf");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+        File hasConf = new File(confDir, "has-client.conf");
+        if (hasConf.exists()) {
+            if (!hasConf.delete()) {
+                System.err.println("Failed to delete has-client.conf.");
+            }
+        }
+    }
+
+    protected void kdcStart() {
+        WebResource webResource = getWebResource("kdcstart");
+        String response = webResource.get(String.class);
+        try {
+            JSONObject result = new JSONObject(response);
+            assertEquals("success", result.getString("result"));
+        } catch (JSONException e) {
+            System.err.println("Failed to start HAS KDC server. " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    protected void kdcInit() {
+        startKdc();
+        WebResource webResource = getWebResource("kdcinit");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+    }
+
+    protected void createPrincipals() {
+        String webServerUrl = "https://" + address + "/has/v1/";
+        startKdc();
+
+        // Create a test host-roles JSON object.
+        JSONObject hostRoles = new JSONObject();
+        try {
+            JSONObject host1 = new JSONObject();
+            host1.put("name", "host1");
+            host1.put("hostRoles", "HDFS,YARN");
+            JSONObject host2 = new JSONObject();
+            host2.put("name", "host2");
+            host2.put("hostRoles", "ZOOKEEPER,HBASE");
+            JSONArray hosts = new JSONArray();
+            hosts.put(host1);
+            hosts.put(host2);
+            hostRoles.put("HOSTS", hosts);
+        } catch (JSONException e) {
+            System.err.println("Failed to create test host roles json object. " + e.toString());
+            System.exit(6);
+        }
+
+        try {
+            URL url = null;
+            try {
+                url = new URL(webServerUrl + "admin/createprincipals");
+            } catch (MalformedURLException e) {
+                e.printStackTrace();
+            }
+
+            URLConnectionFactory connectionFactory = URLConnectionFactory.newDefaultURLConnectionFactory(httpsConf);
+            HttpURLConnection httpConn = (HttpURLConnection) connectionFactory.openConnection(url, false, httpsConf);
+            httpConn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
+            httpConn.setRequestMethod("PUT");
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            OutputStream out = httpConn.getOutputStream();
+            out.write(hostRoles.toString().getBytes());
+            out.flush();
+            out.close();
+
+            assertEquals(200, httpConn.getResponseCode());
+            BufferedReader reader = httpConn.getResponseCode()
+                == HttpURLConnection.HTTP_OK ? new BufferedReader(
+                new InputStreamReader(httpConn.getInputStream(),
+                    "UTF-8")) : new BufferedReader(
+                new InputStreamReader(httpConn.getErrorStream(),
+                    "UTF-8"));
+
+            String response = reader.readLine();
+            reader.close();
+            JSONObject result = new JSONObject(response);
+            assertEquals("success", result.getString("result"));
+        } catch (JSONException | IOException | AuthenticationException e) {
+            System.err.println("Failed to create principals by hostRoles. " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    protected void exportKeytabs() {
+        startKdc();
+        WebResource webResource = getWebResource("admin/exportkeytabs");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("host", "host1");
+        params.add("role", "HDFS");
+        ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+    }
+
+    protected void exportKeytab() {
+        startKdc();
+        WebResource webResource = getWebResource("admin/exportkeytab");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", "admin@HADOOP.COM");
+        ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+    }
+
+    protected void addPrincipal() {
+        startKdc();
+        WebResource webResource = getWebResource("admin/addprincipal");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", "admin");
+        params.add("password", "123");
+        String response = webResource.queryParams(params).post(String.class);
+        try {
+            JSONObject result = new JSONObject(response);
+            assertEquals("success", result.getString("result"));
+        } catch (JSONException e) {
+            System.err.println("Failed to add principal. " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    protected void getPrincipals() {
+        startKdc();
+        WebResource webResource = getWebResource("admin/getprincipals");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        String response = webResource.queryParams(params).get(String.class);
+        try {
+            JSONObject result = new JSONObject(response);
+            assertEquals("success", result.getString("result"));
+        } catch (JSONException e) {
+            System.err.println("Failed to get principals. " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    protected void renamePrincipal() {
+        startKdc();
+        WebResource webResource = getWebResource("admin/renameprincipal");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("oldprincipal", "admin");
+        params.add("newprincipal", "admin2");
+        String response = webResource.queryParams(params).post(String.class);
+        try {
+            JSONObject result = new JSONObject(response);
+            assertEquals("success", result.getString("result"));
+        } catch (JSONException e) {
+            System.err.println("Failed to rename principal. " + e.toString());
+            System.exit(6);
+        }
+    }
+
+    protected void deletePrincipal() {
+        startKdc();
+        WebResource webResource = getWebResource("admin/deleteprincipal");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", "admin2");
+        String response = webResource.queryParams(params).delete(String.class);
+        try {
+            JSONObject result = new JSONObject(response);
+            assertEquals("success", result.getString("result"));
+        } catch (JSONException e) {
+            System.err.println("Failed to delete principal. " + e.toString());
+            System.exit(6);
+        }
+    }
+}


[02/10] directory-kerby git commit: Add the HAS project to Kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hclient/HasClientLoginTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hclient/HasClientLoginTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hclient/HasClientLoginTool.java
new file mode 100644
index 0000000..906b6fb
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hclient/HasClientLoginTool.java
@@ -0,0 +1,269 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hclient;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.client.HasClient;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasJaasLoginUtil;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.server.KdcConfig;
+import org.apache.kerby.kerberos.kerb.server.KdcUtil;
+import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;
+import org.apache.kerby.util.OSUtil;
+
+import javax.security.auth.Subject;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+public class HasClientLoginTool {
+    private static List<String> principalList = new ArrayList<String>();
+    private static List<File> keytabList = new ArrayList<File>();
+
+    private static final String KEYTAB_USAGE = (OSUtil.isWindows()
+        ? "Usage: bin\\k=login-test.cmd" : "Usage: sh bin/login-test.sh")
+        + " [add|run|delete] [conf_dir] [work_dir] [number]\n"
+        + "\n";
+
+    private static final String TGT_USAGE = (OSUtil.isWindows()
+        ? "Usage: bin\\k=login-test.cmd" : "Usage: sh bin/login-test.sh")
+        + " tgt [conf_dir]\n"
+        + "\n";
+
+    private static void printKeytabUsage(String error) {
+        System.err.println(error + "\n");
+        System.err.println(KEYTAB_USAGE);
+        System.exit(-1);
+    }
+
+    private static void printTgtUsage(String error) {
+        System.err.println(error + "\n");
+        System.err.println(TGT_USAGE);
+        System.exit(-1);
+    }
+
+    public static class Task implements Runnable {
+        private int index;
+
+        Task(int index) {
+            this.index = index;
+        }
+
+        @Override
+        public void run() {
+            Subject subject;
+            try {
+                subject = HasJaasLoginUtil.loginUsingKeytab(principalList.get(index),
+                    keytabList.get(index));
+            } catch (IOException e) {
+                // Return early: dereferencing a null subject below would NPE.
+                System.err.println("Failed to login using keytab. " + e);
+                return;
+            }
+            System.out.println("Login succeeded for user: "
+                + subject.getPrincipals().iterator().next());
+        }
+    }
+
+    public static void main(String[] args) {
+
+        if (args.length < 1) {
+            printKeytabUsage("No command given.");
+        }
+        String cmd = args[0];
+        File confDir;
+        File workDir;
+
+        if (cmd.equals("tgt")) {
+            if (args.length != 2) {
+                printTgtUsage("Need 2 args.");
+                return;
+            }
+
+            confDir = new File(args[1]);
+            if (!confDir.exists()) {
+                printTgtUsage("Need the valid conf dir.");
+                return;
+            }
+            File confFile = new File(confDir, "hadmin.conf");
+            HasConfig hasConfig;
+            try {
+                hasConfig = HasUtil.getHasConfig(confFile);
+            } catch (HasException e) {
+                System.err.println(e.getMessage());
+                return;
+            }
+            if (hasConfig == null) {
+                System.err.println("hadmin.conf not exist in " + confDir.getAbsolutePath());
+                return;
+            }
+            String host = hasConfig.getHttpsHost();
+            String port = hasConfig.getHttpsPort();
+
+            HasClient hasClient = new HasClient();
+            TgtTicket tgtTicket;
+            try {
+                tgtTicket = hasClient.requestTgt();
+            } catch (HasException e) {
+                System.err.println("Errors occurred when getting TGT. " + e.getMessage());
+                return;
+            }
+
+            System.out.println("Get the tgt ticket successfully!");
+            System.out.println("The client principal of tgt ticket: " + tgtTicket.getClientPrincipal());
+
+            Subject subject = null;
+            try {
+                subject = HasJaasLoginUtil.loginUserFromTgtTicket(
+                    "https://" + host + ":" + port + "/has/v1?auth_type=RAM");
+            } catch (IOException e) {
+                System.err.println("Errors occurred when login user with TGT. " + e.getMessage());
+                return;
+            }
+
+            System.out.println("Principal: " + subject.getPrincipals().iterator().next());
+        } else {
+            if (args.length != 4) {
+                printKeytabUsage("Need 4 args.");
+                return;
+            }
+
+            confDir = new File(args[1]);
+            workDir = new File(args[2]);
+
+            if (!confDir.exists()) {
+                printKeytabUsage("Need the valid conf dir.");
+                return;
+            }
+            if (!workDir.exists()) {
+                printKeytabUsage("Need the valid work dir.");
+                return;
+            }
+
+            int taskNum = Integer.parseInt(args[3]);
+
+            System.out.println("The task num is: " + taskNum);
+
+            if (taskNum <= 0) {
+                printKeytabUsage("The task num must be greater than zero.");
+            }
+
+            HasAdminClient hasAdminClient;
+            HasAuthAdminClient authHasAdminClient = null;
+            File confFile = new File(confDir, "hadmin.conf");
+            HasConfig hasConfig = null;
+            try {
+                hasConfig = HasUtil.getHasConfig(confFile);
+            } catch (HasException e) {
+                System.err.println(e.getMessage());
+                return;
+            }
+
+            if (hasConfig == null) {
+                System.err.println("hadmin.conf not exist in " + confDir.getAbsolutePath());
+                return;
+            }
+
+            if (hasConfig.getFilterAuthType().equals("kerberos")) {
+                authHasAdminClient = new HasAuthAdminClient(hasConfig);
+            }
+            if (authHasAdminClient != null) {
+                hasAdminClient = authHasAdminClient;
+            } else {
+                hasAdminClient = new HasAdminClient(hasConfig);
+            }
+            String realm = null;
+            try {
+                KdcConfig kdcConfig = KdcUtil.getKdcConfig(confDir);
+                realm = kdcConfig.getKdcRealm();
+            } catch (KrbException e) {
+                printKeytabUsage(e.getMessage());
+            }
+
+            if (cmd.equals("add")) {
+                for (int i = 0; i < taskNum; i++) {
+                    String principal = "test" + i + "@" + realm;
+                    try {
+                        hasAdminClient.addPrincipal(principal);
+                    } catch (HasException e) {
+                        System.err.println("Errors occurred when adding principal. "
+                            + e.getMessage());
+                        return;
+                    }
+                    File keytabFile = new File(workDir, i + ".keytab");
+                    try {
+                        hasAdminClient.exportKeytab(keytabFile, principal);
+                    } catch (HasException e) {
+                        System.err.println("Errors occurred when exporting the keytabs. "
+                            + e.getMessage());
+                        return;
+                    }
+                    System.out.println("Add principals and keytabs successfully.");
+                }
+            } else if (cmd.equals("run")) {
+                ExecutorService exec;
+                for (int i = 0; i < taskNum; i++) {
+                    String principal = "test" + i + "@" + realm;
+                    principalList.add(i, principal);
+                    File file = new File(workDir, i + ".keytab");
+                    keytabList.add(i, file);
+                }
+                System.out.println("Start the login test.");
+                long startTime = System.currentTimeMillis();
+                exec = Executors.newFixedThreadPool(5);
+                for (int i = 0; i < taskNum; ++i) {
+                    exec.submit(new Task(i));
+                }
+                exec.shutdown();
+                try {
+                    exec.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
+                } catch (InterruptedException e) {
+                    System.err.println(e.getMessage());
+                    return;
+                }
+                long endTime = System.currentTimeMillis();
+                System.out.println("Finish the login test.");
+                System.out.println("Cost time: " + (endTime - startTime) + "ms");
+            } else if (cmd.equals("delete")) {
+                for (int i = 0; i < taskNum; i++) {
+                    String principal = "test" + i + "@" + realm;
+                    try {
+                        hasAdminClient.deletePrincipal(principal);
+                    } catch (HasException e) {
+                        System.err.println("Errors occurred when deleting the principal. "
+                            + e.getMessage());
+                        continue;
+                    }
+                    File file = new File(workDir, i + ".keytab");
+                    if (!file.delete()) {
+                        System.err.println("Failed to delete " + i + ".keytab.");
+                    }
+                }
+                System.out.println("Delete principals and keytabs successfully.");
+            } else {
+                printKeytabUsage("Need the cmd with add, run or delete.");
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/HasInitTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/HasInitTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/HasInitTool.java
new file mode 100644
index 0000000..1f46305
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/HasInitTool.java
@@ -0,0 +1,132 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.kdcinit;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.tool.client.kdcinit.cmd.*;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.util.OSUtil;
+
+import java.io.File;
+import java.util.Scanner;
+
+public class HasInitTool {
+    private static final String PROMPT = HasInitTool.class.getSimpleName();
+    private static final String USAGE = (OSUtil.isWindows()
+            ? "Usage: bin\\hadmin.cmd" : "Usage: sh bin/kdcinit.sh")
+            + " <conf-file>\n"
+            + "\tExample:\n"
+            + "\t\t"
+            + (OSUtil.isWindows()
+            ? "bin\\kdcinit.cmd" : "sh bin/kdcinit.sh")
+            + " conf\n";
+
+    private static final String LEGAL_COMMANDS = "Available commands are: "
+            + "\n"
+            + "get_krb5conf, getkrb5\n"
+            + "                         Get krb5.conf\n"
+            + "get_hasConf, gethas\n"
+            + "                         Get has-client.conf\n"
+            + "set_plugin, setplugin\n"
+            + "                         Set plugin\n"
+            + "config_kdcBackend, confbackend\n"
+            + "                         Config kdc backend\n"
+            + "config_kdc, confkdc\n"
+            + "                         Config kdc\n"
+            + "start_kdc, start\n"
+            + "                         Start kdc\n"
+            + "init_kdc, init\n"
+            + "                         Init kdc\n";
+
+    public static void main(String[] args) {
+        if (args.length < 1) {
+            System.err.println(USAGE);
+            System.exit(1);
+        }
+        String confDirPath = args[0];
+        File confFile = new File(confDirPath, "hadmin.conf");
+        HasConfig hasConfig;
+        try {
+            hasConfig = HasUtil.getHasConfig(confFile);
+        } catch (HasException e) {
+            System.err.println(e.getMessage());
+            return;
+        }
+
+        System.out.println(LEGAL_COMMANDS);
+        System.out.println("enter \"<cmd> [?][-help]\" to get cmd help.");
+        Scanner scanner = new Scanner(System.in, "UTF-8");
+        System.out.print(PROMPT + ": ");
+        String input = scanner.nextLine();
+
+        HasAdminClient hadmin = new HasAdminClient(hasConfig, new File(confDirPath));
+        while (!(input.equals("quit") || input.equals("exit") || input.equals("q"))) {
+            try {
+                execute(hadmin, input);
+            } catch (KrbException e) {
+                System.err.println(e.getMessage());
+            }
+            System.out.print(PROMPT + ": ");
+            input = scanner.nextLine();
+        }
+    }
+
+    private static void execute(HasAdminClient hadmin, String input) throws KrbException {
+        input = input.trim();
+        if (input.startsWith("cmd")) {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+        String[] items = input.split("\\s+");
+        String cmd = items[0];
+
+        KdcInitCmd executor;
+        if (cmd.equals("get_krb5conf")
+                || cmd.equals("getkrb5")) {
+            executor = new HasGetKrb5confCmd(hadmin);
+        } else if (cmd.equals("get_hasConf")
+                || cmd.equals("gethas")) {
+            executor = new HasGetHasconfCmd(hadmin);
+        } else if (cmd.equals("set_plugin")
+                || cmd.equals("setplugin")) {
+            executor = new HasSetPluginCmd(hadmin);
+        } else if (cmd.equals("config_kdcBackend")
+                || cmd.equals("confbackend")) {
+            executor = new HasConfKdcBackendCmd(hadmin);
+        } else if (cmd.equals("config_kdc")
+                || cmd.equals("confkdc")) {
+            executor = new HasConfKdcCmd(hadmin);
+        } else if (cmd.equals("start_kdc")
+                || cmd.equals("start")) {
+            executor = new HasStartKdcCmd(hadmin);
+        } else if (cmd.equals("init_kdc")
+                || cmd.equals("init")) {
+            executor = new HasInitKdcCmd(hadmin);
+        } else {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+        executor.execute(items);
+    }
+
+}
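
An illustrative interactive session (the prompt and command names come from
PROMPT and LEGAL_COMMANDS above; the arguments and ordering are examples
only):

    HasInitTool: config_kdc localhost 88 HADOOP.COM
    HasInitTool: config_kdcBackend json /tmp/has/jsonbackend
    HasInitTool: start_kdc
    HasInitTool: init_kdc
    HasInitTool: exit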

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcBackendCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcBackendCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcBackendCmd.java
new file mode 100644
index 0000000..002e936
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcBackendCmd.java
@@ -0,0 +1,66 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.kdcinit.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+/**
+ * Remote config kdc backend cmd.
+ */
+public class HasConfKdcBackendCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: config_kdcBackend <backendType> [dir] [url] [user]"
+        + " [password]\n"
+        + "\tSupported backendType : json,mysql\n"
+        + "\tExample:\n"
+        + "\t\tconfig_kdcBackend json /tmp/has/jsonbackend \n"
+        + "\t\tconfig_kdcBackend mysql jdbc:mysql://127.0.0.1:3306/mysqlbackend root passwd\n";
+
+    public HasConfKdcBackendCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        }
+        if (items.length < 3) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient = getHadmin();
+        if (items.length >= 3 && items[1].equals("json")) {
+            hasAdminClient.configKdcBackend(items[1], items[2],
+                    null, null, null);
+        } else if (items.length >= 5 && items[1].equals("mysql")) {
+            hasAdminClient.configKdcBackend(items[1], null,
+                    items[2], items[3], items[4]);
+        } else {
+            System.err.println(USAGE);
+            return;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcCmd.java
new file mode 100644
index 0000000..fdd3e92
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasConfKdcCmd.java
@@ -0,0 +1,54 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.kdcinit.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+/**
+ * Remote command to configure the KDC
+ */
+public class HasConfKdcCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: config_kdc <host> <port> <realm>\n"
+        + "\tExample:\n"
+        + "\t\tconfig_kdc localhost 88 HADOOP.COM\n";
+
+    public HasConfKdcCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        }
+        if (items.length < 4) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient = getHadmin();
+        hasAdminClient.configKdc(items[2], items[3], items[1]);
+    }
+}
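
Note the argument reordering above: the CLI takes <host> <port> <realm>, and
execute() forwards (items[2], items[3], items[1]). Assuming configKdc's
signature is (port, realm, host), the USAGE example maps as:

    // config_kdc localhost 88 HADOOP.COM
    // items[1] = "localhost", items[2] = "88", items[3] = "HADOOP.COM"
    hasAdminClient.configKdc("88", "HADOOP.COM", "localhost");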

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetHasconfCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetHasconfCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetHasconfCmd.java
new file mode 100644
index 0000000..3011cb4
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetHasconfCmd.java
@@ -0,0 +1,77 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.kdcinit.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.PrintStream;
+
+/**
+ * Remote get has-client.conf cmd
+ */
+public class HasGetHasconfCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: get_hasConf [-p] [path]\n"
+        + "\tExample:\n"
+        + "\t\tget_hasConf\n";
+
+    public HasGetHasconfCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        }
+        File path = getHadmin().getConfDir();
+        if (items.length >= 3 && items[1].startsWith("-p")) {
+            path = new File(items[2]);
+            if (!path.exists()) {
+                if (!path.mkdirs()) {
+                    System.err.println("Cannot create file : " + items[2]);
+                    return;
+                }
+            }
+        }
+        File hasConf = new File(path, "has-client.conf");
+
+        HasAdminClient hasAdminClient = getHadmin();
+        String content = hasAdminClient.getHasconf();
+        if (content == null) {
+            System.err.println("Failed to get has.conf.");
+            return;
+        }
+        try (PrintStream ps = new PrintStream(new FileOutputStream(hasConf))) {
+            ps.println(content);
+            System.out.println("has-client.conf has been saved to: " + hasConf.getAbsolutePath());
+        } catch (FileNotFoundException e) {
+            System.err.println(e.getMessage());
+        }
+    }
+}
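
A minimal usage sketch for this command and the sibling get_krb5conf below,
assuming dispatch by the kdcinit shell with an already-built HasAdminClient
(hadmin):

    // fetch has-client.conf into the admin client's default conf dir
    new HasGetHasconfCmd(hadmin).execute(new String[]{"get_hasConf"});
    // or into an explicit directory (created if missing)
    new HasGetHasconfCmd(hadmin).execute(new String[]{"get_hasConf", "-p", "/tmp/has"});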

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetKrb5confCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetKrb5confCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetKrb5confCmd.java
new file mode 100644
index 0000000..4b39ac8
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasGetKrb5confCmd.java
@@ -0,0 +1,77 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.kdcinit.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.PrintStream;
+
+/**
+ * Remote get krb5.conf cmd
+ */
+public class HasGetKrb5confCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: get_krb5conf [-p] [path]\n"
+        + "\tExample:\n"
+        + "\t\tget_krb5conf -p /tmp/has\n";
+
+    public HasGetKrb5confCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        }
+        File path = getHadmin().getConfDir();
+        if (items.length >= 3 && items[1].startsWith("-p")) {
+            path = new File(items[2]);
+            if (!path.exists()) {
+                if (!path.mkdirs()) {
+                    System.err.println("Cannot create file : " + items[2]);
+                    return;
+                }
+            }
+        }
+        File krb5Conf = new File(path, "krb5.conf");
+
+        HasAdminClient hasAdminClient = getHadmin();
+        String content = hasAdminClient.getKrb5conf();
+        if (content == null) {
+            System.err.println("Failed to get krb5.conf.");
+            return;
+        }
+        try (PrintStream ps = new PrintStream(new FileOutputStream(krb5Conf))) {
+            ps.println(content);
+            System.out.println("krb5.conf has been saved to: " + krb5Conf.getAbsolutePath());
+        } catch (FileNotFoundException e) {
+            System.err.println(e.getMessage());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasInitKdcCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasInitKdcCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasInitKdcCmd.java
new file mode 100644
index 0000000..24cb63c
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasInitKdcCmd.java
@@ -0,0 +1,94 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.kdcinit.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * Remote init kdc cmd
+ */
+public class HasInitKdcCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: init_kdc [-p] [path]\n"
+        + "\tExample:\n"
+        + "\t\tinit_kdc\n";
+
+    public HasInitKdcCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        }
+        File path = getHadmin().getConfDir();
+        if (items.length >= 3 && items[1].startsWith("-p")) {
+            path = new File(items[2]);
+            if (!path.exists()) {
+                if (!path.mkdirs()) {
+                    System.err.println("Cannot create file : " + items[2]);
+                    return;
+                }
+            }
+        }
+        File hadminKeytab = new File(path, "admin.keytab");
+
+        HasAdminClient hasAdminClient = getHadmin();
+        InputStream content = hasAdminClient.initKdc();
+
+        if (content == null) {
+            System.err.println("Failed to init kdc.");
+            return;
+        }
+
+        FileOutputStream fos;
+        try {
+            fos = new FileOutputStream(hadminKeytab);
+        } catch (FileNotFoundException e) {
+            System.err.println("The admin keytab file could not be created. " + e.getMessage());
+            return;
+        }
+        byte[] buffer = new byte[4 * 1024];
+        int read;
+        try {
+            while ((read = content.read(buffer)) > 0) {
+                fos.write(buffer, 0, read);
+            }
+            fos.close();
+            content.close();
+        } catch (IOException e) {
+            System.err.println("Errors occurred when getting the admin.keytab. " + e.getMessage());
+        }
+
+        System.out.println("admin.keytab has saved in : " + hadminKeytab.getAbsolutePath()
+            + ",\nplease safely save it to use hadmin.");
+
+    }
+}
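
The keytab download above copies the stream by hand; an equivalent sketch with
try-with-resources, assuming the same non-null content stream and hadminKeytab
file as above, would be:

    try (InputStream in = content;
         FileOutputStream out = new FileOutputStream(hadminKeytab)) {
        byte[] buf = new byte[4 * 1024];
        int n;
        while ((n = in.read(buf)) > 0) {
            out.write(buf, 0, n);
        }
    } catch (IOException e) {
        System.err.println("Errors occurred when getting the admin.keytab. " + e.getMessage());
    }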

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasSetPluginCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasSetPluginCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasSetPluginCmd.java
new file mode 100644
index 0000000..457cf50
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasSetPluginCmd.java
@@ -0,0 +1,53 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.kdcinit.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+/**
+ * Remote set plugin cmd
+ */
+public class HasSetPluginCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: set_plugin <plugin>\n"
+        + "\tExample:\n"
+        + "\t\tset_plugin RAM\n";
+
+    public HasSetPluginCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        } else {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient = getHadmin();
+        hasAdminClient.setPlugin(items[1]);
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasStartKdcCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasStartKdcCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasStartKdcCmd.java
new file mode 100644
index 0000000..6511e0a
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/HasStartKdcCmd.java
@@ -0,0 +1,49 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.kdcinit.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+/**
+ * Remote start kdc cmd
+ */
+public class HasStartKdcCmd extends KdcInitCmd {
+
+    public static final String USAGE = "Usage: start_kdc\n"
+        + "\tExample:\n"
+        + "\t\tstart\n";
+
+    public HasStartKdcCmd(HasAdminClient hadmin) {
+        super(hadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws KrbException {
+        if (items.length >= 2) {
+            if (items[1].startsWith("?") || items[1].startsWith("-help")) {
+                System.out.println(USAGE);
+                return;
+            }
+        }
+        HasAdminClient hasAdminClient = getHadmin();
+        hasAdminClient.startKdc();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/KdcInitCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/KdcInitCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/KdcInitCmd.java
new file mode 100644
index 0000000..a75f702
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kdcinit/cmd/KdcInitCmd.java
@@ -0,0 +1,42 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.kdcinit.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.kerby.kerberos.kerb.KrbException;
+
+public abstract class KdcInitCmd {
+
+    private HasAdminClient hadmin;
+
+    public KdcInitCmd(HasAdminClient hadmin) {
+        this.hadmin = hadmin;
+    }
+
+    protected HasAdminClient getHadmin() {
+        return hadmin;
+    }
+
+    /**
+     * Execute the kdc init cmd.
+     * @param input Input cmd to execute
+     */
+    public abstract void execute(String[] input) throws KrbException;
+}
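
KdcInitCmd is the extension point for the kdcinit shell: a new remote command
only needs a USAGE string and an execute() body. A hypothetical subclass for
illustration (stopKdc() is not an existing API, just a placeholder):

    public class HasStopKdcCmd extends KdcInitCmd {
        public static final String USAGE = "Usage: stop_kdc\n";

        public HasStopKdcCmd(HasAdminClient hadmin) {
            super(hadmin);
        }

        @Override
        public void execute(String[] items) throws KrbException {
            // delegate to the admin client, as the sibling commands do
            // getHadmin().stopKdc();  // hypothetical API
        }
    }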

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitOption.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitOption.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitOption.java
new file mode 100644
index 0000000..0e29085
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitOption.java
@@ -0,0 +1,88 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License. 
+ *
+ */
+package org.apache.hadoop.has.tool.client.kinit;
+
+import org.apache.kerby.KOption;
+import org.apache.kerby.KOptionInfo;
+import org.apache.kerby.KOptionType;
+import org.apache.kerby.kerberos.kerb.client.KrbOptionGroup;
+
+public enum KinitOption implements KOption {
+    NONE(null),
+
+    CLIENT_PRINCIPAL(new KOptionInfo("client-principal", "Client principal",
+        KrbOptionGroup.KRB, KOptionType.STR)),
+    LIFE_TIME(new KOptionInfo("-l", "lifetime",
+        KrbOptionGroup.KRB, KOptionType.DURATION)),
+    START_TIME(new KOptionInfo("-s", "start time",
+        KrbOptionGroup.KRB, KOptionType.DURATION)),
+    RENEWABLE_LIFE(new KOptionInfo("-r", "renewable lifetime",
+        KrbOptionGroup.KRB, KOptionType.DURATION)),
+    FORWARDABLE(new KOptionInfo("-f", "forwardable",
+        KrbOptionGroup.KDC_FLAGS)),
+    NOT_FORWARDABLE(new KOptionInfo("-F", "not forwardable",
+        KrbOptionGroup.KDC_FLAGS)),
+    PROXIABLE(new KOptionInfo("-p", "proxiable",
+        KrbOptionGroup.KDC_FLAGS)),
+    NOT_PROXIABLE(new KOptionInfo("-P", "not proxiable",
+        KrbOptionGroup.KDC_FLAGS)),
+    RENEW(new KOptionInfo("-R", "renew",
+        KrbOptionGroup.KDC_FLAGS)),
+    USE_PASSWD(new KOptionInfo("using-password", "using password",
+        KrbOptionGroup.KRB)),
+    USER_PASSWD(new KOptionInfo("user-passwd", "User plain password",
+        KrbOptionGroup.KRB)),
+    USE_KEYTAB(new KOptionInfo("-k", "use keytab",
+        KrbOptionGroup.KRB)),
+    USE_DFT_KEYTAB(new KOptionInfo("-i", "use default client keytab (with -k)",
+        KrbOptionGroup.KRB)),
+    KEYTAB_FILE(new KOptionInfo("-t", "filename of keytab to use",
+        KrbOptionGroup.KRB, KOptionType.FILE)),
+    KRB5_CACHE(new KOptionInfo("-c", "Kerberos 5 cache name",
+        KrbOptionGroup.KRB, KOptionType.STR)),
+    SERVICE(new KOptionInfo("-S", "service",
+        KrbOptionGroup.KRB, KOptionType.STR)),
+
+    CONF_DIR(new KOptionInfo("-conf", "conf dir", KrbOptionGroup.KRB, KOptionType.DIR));
+
+    private final KOptionInfo optionInfo;
+
+    KinitOption(KOptionInfo optionInfo) {
+        this.optionInfo = optionInfo;
+    }
+
+    @Override
+    public KOptionInfo getOptionInfo() {
+        return optionInfo;
+    }
+
+    public static KinitOption fromName(String name) {
+        if (name != null) {
+            for (KinitOption ko : values()) {
+                if (ko.optionInfo != null
+                        && ko.optionInfo.getName().equals(name)) {
+                    return ko;
+                }
+            }
+        }
+        return NONE;
+    }
+}
+
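
fromName() is the lookup used by the argument loop in KinitTool below; any flag
not declared here falls through to NONE and is rejected by the parser:

    KinitOption kto = KinitOption.fromName("-t");  // KEYTAB_FILE, takes a FILE value
    KinitOption bad = KinitOption.fromName("-z");  // NONE: reported as an invalid option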

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitTool.java
new file mode 100644
index 0000000..a061266
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/kinit/KinitTool.java
@@ -0,0 +1,384 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License. 
+ *
+ */
+package org.apache.hadoop.has.tool.client.kinit;
+
+import org.apache.kerby.KOption;
+import org.apache.kerby.KOptionGroup;
+import org.apache.kerby.KOptionInfo;
+import org.apache.kerby.KOptionType;
+import org.apache.kerby.KOptions;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.client.KrbClient;
+import org.apache.kerby.kerberos.kerb.client.KrbKdcOption;
+import org.apache.kerby.kerberos.kerb.client.KrbOption;
+import org.apache.kerby.kerberos.kerb.client.KrbOptionGroup;
+import org.apache.kerby.kerberos.kerb.client.PkinitOption;
+import org.apache.kerby.kerberos.kerb.client.TokenOption;
+import org.apache.kerby.kerberos.kerb.type.ticket.SgtTicket;
+import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;
+import org.apache.kerby.util.OSUtil;
+import org.apache.kerby.util.SysUtil;
+
+import java.io.Console;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.Scanner;
+
+/**
+ * kinit like tool
+ *
+ * Ref. MIT kinit command tool usage.
+ */
+public class KinitTool {
+
+    private static final String USAGE = (OSUtil.isWindows()
+            ? "Usage: bin\\kinit.cmd" : "Usage: sh bin/kinit.sh")
+            + " <-conf conf_dir> [-V] [-l lifetime] [-s start_time]\n"
+            + "\t\t[-r renewable_life] [-f | -F] [-p | -P] -n [-a | -A] [-C] [-E]\n"
+            + "\t\t[-v] [-R] [-k [-i|-t keytab_file]] [-c cachename]\n"
+            + "\t\t[-S service_name] [-T ticket_armor_cache]\n"
+            + "\t\t[-X <attribute>[=<value>]] <principal>\n\n"
+            + "\tDESCRIPTION:\n"
+            + "\t\tkinit obtains and caches an initial ticket-granting ticket for principal.\n\n"
+            + "\tOPTIONS:\n"
+            + "\t\t-V verbose\n"
+            + "\t\t-l lifetime\n"
+            + "\t\t-s start time\n"
+            + "\t\t-r renewable lifetime\n"
+            + "\t\t-f forwardable\n"
+            + "\t\t-F not forwardable\n"
+            + "\t\t-p proxiable\n"
+            + "\t\t-P not proxiable\n"
+            + "\t\t-n anonymous\n"
+            + "\t\t-a include addresses\n"
+            + "\t\t-A do not include addresses\n"
+            + "\t\t-v validate\n"
+            + "\t\t-R renew\n"
+            + "\t\t-C canonicalize\n"
+            + "\t\t-E client is enterprise principal name\n"
+            + "\t\t-k use keytab\n"
+            + "\t\t-i use default client keytab (with -k)\n"
+            + "\t\t-t filename of keytab to use\n"
+            + "\t\t-c Kerberos 5 cache name\n"
+            + "\t\t-S service\n"
+            + "\t\t-T armor credential cache\n"
+            + "\t\t-X <attribute>[=<value>]\n"
+            + "\n";
+
+    private static void printUsage(String error) {
+        System.err.println(error + "\n");
+        System.err.println(USAGE);
+        System.exit(-1);
+    }
+
+    private static final String KVNO_USAGE = (OSUtil.isWindows()
+        ? "Usage: bin\\kinit.cmd" : "Usage: sh bin/kinit.sh")
+        + " <-conf conf_dir> <-c cachename> <-S service_name>\n\n"
+        + "\tDESCRIPTION:\n"
+        + "\t\tkinit obtains a service ticket for the specified principal and prints out the key version number.\n"
+        + "\n";
+
+    private static void printKvnoUsage(String error) {
+        System.err.println(error + "\n");
+        System.err.println(KVNO_USAGE);
+        System.exit(-1);
+    }
+
+    /**
+     * Get password for the input principal from console
+     */
+    private static String getPassword(String principal) {
+        Console console = System.console();
+        if (console == null) {
+            System.out.println("Couldn't get Console instance, "
+                    + "maybe you're running this from within an IDE. "
+                    + "Use scanner to read password.");
+            System.out.println("Password for " + principal + ":");
+            try (Scanner scanner = new Scanner(System.in, "UTF-8")) {
+                return scanner.nextLine().trim();
+            }
+        }
+        console.printf("Password for " + principal + ":");
+        char[] passwordChars = console.readPassword();
+        String password = new String(passwordChars).trim();
+        Arrays.fill(passwordChars, ' ');
+
+        return password;
+    }
+
+    private static void requestTicket(String principal, KOptions ktOptions) {
+        ktOptions.add(KinitOption.CLIENT_PRINCIPAL, principal);
+
+        File confDir = null;
+        if (ktOptions.contains(KinitOption.CONF_DIR)) {
+            confDir = ktOptions.getDirOption(KinitOption.CONF_DIR);
+        }
+
+        KrbClient krbClient = null;
+        try {
+            krbClient = getClient(confDir);
+        } catch (KrbException e) {
+            System.err.println("Create krbClient failed: " + e.getMessage());
+            System.exit(1);
+        }
+
+        if (ktOptions.contains(KinitOption.RENEW)) {
+            if (ktOptions.contains(KinitOption.KRB5_CACHE)) {
+                String ccName = ktOptions.getStringOption(KinitOption.KRB5_CACHE);
+                File ccFile = new File(ccName);
+
+                SgtTicket sgtTicket = null;
+                try {
+                    sgtTicket = krbClient.requestSgt(ccFile, null);
+                } catch (KrbException e) {
+                    System.err.println("kinit: " + e.getKrbErrorCode().getMessage());
+                    return;
+                }
+
+                try {
+                    krbClient.renewTicket(sgtTicket, ccFile);
+                } catch (KrbException e) {
+                    System.err.println("kinit: " + e.getKrbErrorCode().getMessage());
+                    return;
+                }
+
+                System.out.println("Successfully renewed.");
+            }
+            return;
+        }
+
+        if (ktOptions.contains(KinitOption.SERVICE) && ktOptions.contains(KinitOption.KRB5_CACHE)) {
+            String ccName = ktOptions.getStringOption(KinitOption.KRB5_CACHE);
+            File ccFile = new File(ccName);
+            if (ccFile.exists()) {
+                System.out.println("Use credential cache to request a service ticket.");
+                String servicePrincipal = ktOptions.getStringOption(KinitOption.SERVICE);
+                SgtTicket sgtTicket = null;
+                try {
+                    sgtTicket = krbClient.requestSgt(ccFile, servicePrincipal);
+                } catch (KrbException e) {
+                    System.err.println("Kinit: get service ticket failed: " + e.getMessage());
+                    System.exit(1);
+                }
+
+                try {
+                    krbClient.storeTicket(sgtTicket, ccFile);
+                } catch (KrbException e) {
+                    System.err.println("Kinit: store ticket failed: " + e.getMessage());
+                    System.exit(1);
+                }
+
+                System.out.println(sgtTicket.getEncKdcRepPart().getSname().getName() + ": knvo = "
+                    + sgtTicket.getTicket().getEncryptedEncPart().getKvno());
+                return;
+            }
+        }
+
+        if (!ktOptions.contains(KinitOption.USE_KEYTAB)) {
+            //If tickets are not requested with a keytab, fall back to password.
+            ktOptions.add(KinitOption.USE_PASSWD);
+            String password = getPassword(principal);
+            ktOptions.add(KinitOption.USER_PASSWD, password);
+        }
+
+        TgtTicket tgt = null;
+        try {
+            tgt = krbClient.requestTgt(convertOptions(ktOptions));
+        } catch (KrbException e) {
+            System.err.println("Authentication failed: " + e.getMessage());
+            System.exit(1);
+        }
+
+        File ccacheFile;
+        if (ktOptions.contains(KinitOption.KRB5_CACHE)) {
+            String ccacheName = ktOptions.getStringOption(KinitOption.KRB5_CACHE);
+            ccacheFile = new File(ccacheName);
+        } else {
+            String ccacheName = getCcacheName(krbClient);
+            ccacheFile = new File(ccacheName);
+        }
+
+        try {
+            krbClient.storeTicket(tgt, ccacheFile);
+        } catch (KrbException e) {
+            System.err.println("Store ticket failed: " + e.getMessage());
+            System.exit(1);
+        }
+
+        System.out.println("Successfully requested and stored ticket in "
+            + ccacheFile.getAbsolutePath());
+
+        if (ktOptions.contains(KinitOption.SERVICE)) {
+            System.out.println("Use tgt to request a service ticket.");
+            String servicePrincipal = ktOptions.getStringOption(KinitOption.SERVICE);
+            SgtTicket sgtTicket;
+            try {
+                sgtTicket = krbClient.requestSgt(tgt, servicePrincipal);
+            } catch (KrbException e) {
+                System.err.println("kinit: " + e.getKrbErrorCode().getMessage());
+                return;
+            }
+
+            System.out.println(sgtTicket.getEncKdcRepPart().getSname().getName() + ": knvo = "
+                + sgtTicket.getTicket().getEncryptedEncPart().getKvno());
+        }
+    }
+
+    /**
+     * Init the client.
+     */
+    private static KrbClient getClient(File confDir) throws KrbException {
+        KrbClient krbClient;
+
+        if (confDir != null) {
+            krbClient = new KrbClient(confDir);
+        } else {
+            krbClient = new KrbClient();
+        }
+
+        krbClient.init();
+        return krbClient;
+    }
+
+    /**
+     * Get credential cache file name if not specified.
+     */
+    private static String getCcacheName(KrbClient krbClient) {
+        final String ccacheNameEnv = System.getenv("KRB5CCNAME");
+        final String ccacheNameConf = krbClient.getSetting().getKrbConfig().getString("default_ccache_name");
+        String ccacheName;
+        if (ccacheNameEnv != null) {
+            ccacheName = ccacheNameEnv;
+        } else if (ccacheNameConf != null) {
+            ccacheName = ccacheNameConf;
+        } else {
+            StringBuilder uid = new StringBuilder();
+            try {
+                //Get UID through "id -u" command
+                String command = "id -u";
+                Process child = Runtime.getRuntime().exec(command);
+                InputStream in = child.getInputStream();
+                int c;
+                while ((c = in.read()) != -1) {
+                    uid.append((char) c);
+                }
+                in.close();
+            } catch (IOException e) {
+                System.err.println("Failed to get UID.");
+                System.exit(1);
+            }
+            ccacheName = "krb5cc_" + uid.toString().trim();
+            ccacheName = SysUtil.getTempDir().toString() + "/" + ccacheName;
+        }
+
+        return ccacheName;
+    }
+
+    public static void main(String[] args) {
+        KOptions ktOptions = new KOptions();
+        KinitOption kto;
+        String principal = null;
+
+        int i = 0;
+        String opt, param, error;
+        while (i < args.length) {
+            error = null;
+
+            opt = args[i++];
+            if (opt.startsWith("-")) {
+                kto = KinitOption.fromName(opt);
+                if (kto == KinitOption.NONE) {
+                    error = "Invalid option: " + opt;
+                    printUsage(error);
+                }
+            } else {
+                principal = opt;
+                kto = KinitOption.NONE;
+            }
+
+            if (kto != KinitOption.NONE && kto.getOptionInfo().getType() != KOptionType.NOV) {
+                // require a parameter
+                param = null;
+                if (i < args.length) {
+                    param = args[i++];
+                }
+                if (param != null) {
+                    KOptions.parseSetValue(kto.getOptionInfo(), param);
+                } else {
+                    error = "Option " + opt + " require a parameter";
+                }
+            }
+
+            if (error != null) {
+                printUsage(error);
+            }
+            if (kto != KinitOption.NONE) {
+                ktOptions.add(kto);
+            }
+        }
+
+        if (!ktOptions.contains(KinitOption.CONF_DIR)) {
+            printUsage("No conf dir given.");
+        }
+
+        if (principal == null) {
+            if (!ktOptions.contains(KinitOption.SERVICE) && !ktOptions.contains(KinitOption.KRB5_CACHE)) {
+                printUsage("No principal is specified");
+            } else if (ktOptions.contains(KinitOption.SERVICE) && !ktOptions.contains(KinitOption.KRB5_CACHE)) {
+                printKvnoUsage("No credential cache file given.");
+            }
+        }
+
+        requestTicket(principal, ktOptions);
+        System.exit(0);
+    }
+
+    /**
+     * Convert kinit tool options to KOptions.
+     * @param toolOptions the kinit tool options to convert
+     * @return the converted KOptions
+     */
+    static KOptions convertOptions(KOptions toolOptions) {
+        KOptions results = new KOptions();
+
+        for (KOption toolOpt : toolOptions.getOptions()) {
+            KOptionInfo kOptionInfo = toolOpt.getOptionInfo();
+            KOptionGroup group = kOptionInfo.getGroup();
+            KOption kOpt = null;
+
+            if (group == KrbOptionGroup.KRB) {
+                kOpt = KrbOption.fromOptionName(kOptionInfo.getName());
+            } else if (group == KrbOptionGroup.PKINIT) {
+                kOpt = PkinitOption.fromOptionName(kOptionInfo.getName());
+            } else if (group == KrbOptionGroup.TOKEN) {
+                kOpt = TokenOption.fromOptionName(kOptionInfo.getName());
+            } else if (group == KrbOptionGroup.KDC_FLAGS) {
+                kOpt = KrbKdcOption.fromOptionName(kOptionInfo.getName());
+            }
+            if (kOpt != null && kOpt.getOptionInfo() != KrbOption.NONE.getOptionInfo()) {
+                kOpt.getOptionInfo().setValue(toolOpt.getOptionInfo().getValue());
+                results.add(kOpt);
+            }
+        }
+
+        return results;
+    }
+}
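
getCcacheName() resolves the credential cache the way MIT kinit does:

    1. $KRB5CCNAME              (environment variable)
    2. default_ccache_name      (from the loaded krb5 config)
    3. <tmpdir>/krb5cc_<uid>    (fallback; uid read from "id -u")

So with neither 1 nor 2 set and "id -u" printing 1000, the tickets land in
e.g. /tmp/krb5cc_1000.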

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistOption.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistOption.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistOption.java
new file mode 100644
index 0000000..dab4d47
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistOption.java
@@ -0,0 +1,66 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.klist;
+
+import org.apache.kerby.KOption;
+import org.apache.kerby.KOptionInfo;
+import org.apache.kerby.KOptionType;
+
+public enum KlistOption implements KOption {
+    NONE(null),
+    CREDENTIALS_CACHE(new KOptionInfo("-c", "specifies path of credentials cache",
+        KOptionType.STR)),
+    KEYTAB(new KOptionInfo("-k", "specifies keytab")),
+    DEFAULT_CLIENT_KEYTAB(new KOptionInfo("-i", "uses default client keytab if no name given")),
+    LIST_CREDENTIAL_CACHES(new KOptionInfo("-l", "list credential caches in collection")),
+    ALL_CREDENTIAL_CACHES(new KOptionInfo("-A", "shows content of all credential caches")),
+    ENCRYPTION_TYPE(new KOptionInfo("-e", "shows encryption type")),
+    KERBEROS_VERSION(new KOptionInfo("-V", "shows Kerberos version")),
+    AUTHORIZATION_DATA_TYPE(new KOptionInfo("-d", "shows the submitted authorization data type")),
+    CREDENTIALS_FLAGS(new KOptionInfo("-f", "show credential flags")),
+    EXIT_TGT_EXISTENCE(new KOptionInfo("-s", "sets exit status based on valid tgt existence")),
+    DISPL_ADDRESS_LIST(new KOptionInfo("-a", "displays the address list")),
+    NO_REVERSE_RESOLVE(new KOptionInfo("-n", "do not reverse resolve")),
+    SHOW_KTAB_ENTRY_TS(new KOptionInfo("-t", "shows keytab entry timestamps")),
+    SHOW_KTAB_ENTRY_KEY(new KOptionInfo("-K", "show keytab entry keys"));
+
+    private final KOptionInfo optionInfo;
+
+    KlistOption(KOptionInfo optionInfo) {
+        this.optionInfo = optionInfo;
+    }
+
+    @Override
+    public KOptionInfo getOptionInfo() {
+        return optionInfo;
+    }
+
+    public static KlistOption fromName(String name) {
+        if (name != null) {
+            for (KlistOption ko : values()) {
+                if (ko.optionInfo != null
+                        && ko.optionInfo.getName().equals(name)) {
+                    return ko;
+                }
+            }
+        }
+        return NONE;
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistTool.java
new file mode 100644
index 0000000..7143c04
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/klist/KlistTool.java
@@ -0,0 +1,293 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.klist;
+
+import org.apache.kerby.KOptionType;
+import org.apache.kerby.KOptions;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.ccache.Credential;
+import org.apache.kerby.kerberos.kerb.ccache.CredentialCache;
+import org.apache.kerby.kerberos.kerb.client.KrbClient;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
+import org.apache.kerby.util.HexUtil;
+import org.apache.kerby.util.OSUtil;
+import org.apache.kerby.util.SysUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+
+/**
+ * klist like tool
+ *
+ * Ref. MIT klist command tool usage.
+ */
+public class KlistTool {
+    private static final Logger LOG = LoggerFactory.getLogger(KlistTool.class);
+
+    private static final String USAGE = (OSUtil.isWindows()
+        ? "Usage: bin\\klist.cmd" : "Usage: sh bin/klist.sh")
+            + " [-e] [-V] [[-c] [-l] [-A] [-d] [-f] [-s] "
+            + "[-a [-n]]] [-k [-t] [-K]] [name]\n"
+            + "\t-c specifies credentials cache\n"
+            + "\t-k specifies keytab\n"
+            + "\t   (Default is credentials cache)\n"
+            + "\t-i uses default client keytab if no name given\n"
+            + "\t-l lists credential caches in collection\n"
+            + "\t-A shows content of all credential caches\n"
+            + "\t-e shows the encryption type\n"
+            + "\t-V shows the Kerberos version and exits\n"
+            + "\toptions for credential caches:\n"
+            + "\t\t-d shows the submitted authorization data types\n"
+            + "\t\t-f shows credentials flags\n"
+            + "\t\t-s sets exit status based on valid tgt existence\n"
+            + "\t\t-a displays the address list\n"
+            + "\t\t\t-n do not reverse-resolve\n"
+            + "\toptions for keytabs:\n"
+            + "\t\t-t shows keytab entry timestamps\n"
+            + "\t\t-K shows keytab entry keys\n";
+
+    // option "-k" hava a optional parameter, "/etc/krb5.keytab" if not specified
+    private static String keytabFilePath = null;
+
+    private static void printUsage(String error) {
+        System.err.println(error + "\n");
+        System.err.println(USAGE);
+        System.exit(-1);
+    }
+
+    private static int printCredentialCacheInfo(KOptions klOptions) {
+        CredentialCache cc = new CredentialCache();
+        List<Credential> credentials;
+        InputStream cis = null;
+        String fileName;
+
+        if (!klOptions.contains(KlistOption.CREDENTIALS_CACHE)) {
+            fileName = getCcacheName();
+        } else {
+            fileName = klOptions.getStringOption(KlistOption.CREDENTIALS_CACHE);
+        }
+        try {
+            cis = Files.newInputStream(Paths.get(fileName));
+            cc.load(cis);
+        } catch (IOException e) {
+            LOG.error("Failed to open CredentialCache from file: " + fileName + ". " + e.toString());
+            return 1;
+        } finally {
+            try {
+                if (cis != null) {
+                    cis.close();
+                }
+            } catch (IOException e) {
+                LOG.warn("Fail to close input stream. " + e);
+            }
+        }
+
+        if (cc != null) {
+            credentials = cc.getCredentials();
+
+            System.out.println("Ticket cache: " + fileName);
+            System.out.println("Default principal: " + cc.getPrimaryPrincipal().getName());
+
+            if (credentials.isEmpty()) {
+                System.out.println("No credential has been cached.");
+            } else {
+                DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+
+                System.out.println("Valid starting\t\tExpires\t\t\tService principal");
+
+                for (Credential crd : credentials) {
+                    System.out.println(df.format(crd.getStartTime().getTime()) + "\t"
+                        + df.format(crd.getEndTime().getTime()) + "\t"
+                        + crd.getServerName() + "\n"
+                        + "\t" + "renew until" + "\t" + df.format(crd.getRenewTill().getTime()));
+                }
+            }
+        }
+
+        return 0;
+    }
+
+    /**
+     * Get credential cache file name if not specified.
+     */
+    private static String getCcacheName() {
+        String ccacheName;
+        String ccacheNameEnv = System.getenv("KRB5CCNAME");
+        String ccacheNameConf = null;
+        File confDir = new File("/etc");
+        try {
+            KrbClient krbClient = new KrbClient(confDir);
+            ccacheNameConf = krbClient.getSetting().getKrbConfig().getString("default_ccache_name");
+        } catch (KrbException e) {
+            System.err.println("Create krbClient failed: " + e.getMessage());
+            System.exit(1);
+        }
+        if (ccacheNameEnv != null) {
+            ccacheName = ccacheNameEnv;
+        } else if (ccacheNameConf != null) {
+            ccacheName = ccacheNameConf;
+        } else {
+            StringBuilder uid = new StringBuilder();
+            try {
+                //Get UID through "id -u" command
+                String command = "id -u";
+                Process child = Runtime.getRuntime().exec(command);
+                InputStream in = child.getInputStream();
+                int c;
+                while ((c = in.read()) != -1) {
+                    uid.append((char) c);
+                }
+                in.close();
+            } catch (IOException e) {
+                System.err.println("Failed to get UID.");
+                System.exit(1);
+            }
+            ccacheName = "krb5cc_" + uid.toString().trim();
+            ccacheName = SysUtil.getTempDir().toString() + "/" + ccacheName;
+        }
+
+        return ccacheName;
+    }
+
+    private static int printKeytabInfo(KOptions klOptions) {
+        String[] header = new String[4];
+        header[0] = "KVNO Principal\n"
+                + "---- --------------------------------------------------------------------------";
+        header[1] = header[0];
+        header[2] = "KVNO Timestamp           Principal\n"
+                + "---- ------------------- ------------------------------------------------------";
+        header[3] = header[2];
+        int outputIndex = 0;
+        if (klOptions.contains(KlistOption.SHOW_KTAB_ENTRY_TS)) {
+            outputIndex |= 2;
+        }
+        if (klOptions.contains(KlistOption.SHOW_KTAB_ENTRY_KEY)) {
+            outputIndex |= 1;
+        }
+        System.out.println("Keytab name: FILE:" + keytabFilePath);
+        try {
+            File keytabFile = new File(keytabFilePath);
+            if (!keytabFile.exists()) {
+                System.out.println("klist: Key table file '" + keytabFilePath + "' not found. ");
+                return 0;
+            }
+            System.out.println(header[outputIndex]);
+            SimpleDateFormat format = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss");
+            Keytab keytab = Keytab.loadKeytab(keytabFile);
+            List<PrincipalName> principals = keytab.getPrincipals();
+            for (PrincipalName principal : principals) {
+                List<KeytabEntry> keytabEntries = keytab.getKeytabEntries(principal);
+                for (KeytabEntry entry : keytabEntries) {
+                    StringBuilder sb = new StringBuilder();
+                    sb.append(String.format("%-4d ", entry.getKvno()));
+                    if ((outputIndex & 2) != 0) {
+                        Date date = new Date(entry.getTimestamp().getTime());
+                        sb.append(format.format(date));
+                        sb.append(' ');
+                    }
+                    sb.append(String.format("%s ", principal.getName()));
+                    if ((outputIndex & 1) != 0) {
+                        sb.append("(0x");
+                        sb.append(HexUtil.bytesToHex(entry.getKey().getKeyData()));
+                        sb.append(")");
+                    }
+                    System.out.println(sb);
+                }
+            }
+
+        } catch (IOException e) {
+            System.err.println("klist: Error while scan key table file '" + keytabFilePath + "'");
+        }
+        return 0;
+    }
+
+    private static int printInfo(KOptions klOptions) {
+        if (klOptions.contains(KlistOption.KEYTAB)) {
+            return printKeytabInfo(klOptions);
+        }
+        return printCredentialCacheInfo(klOptions);
+    }
+
+    public static void main(String[] args) throws Exception {
+        KOptions klOptions = new KOptions();
+        KlistOption klopt;
+        // String name = null;
+
+        int i = 0;
+        String opt, value, error;
+        while (i < args.length) {
+            error = null;
+            opt = args[i++];
+
+            if (opt.startsWith("-")) {
+                klopt = KlistOption.fromName(opt);
+                if (klopt == KlistOption.NONE) {
+                    error = "Invalid option:" + opt;
+                }
+            } else {
+                if (keytabFilePath == null && klOptions.contains(KlistOption.KEYTAB)) {
+                    keytabFilePath = opt;
+                }
+                break;
+            }
+
+            if (error == null && klopt.getOptionInfo().getType() != KOptionType.NOV) {
+                //needs value for this parameter
+                value = null;
+                if (i < args.length) {
+                    value = args[i++];
+                }
+                if (value != null) {
+                    KOptions.parseSetValue(klopt.getOptionInfo(), value);
+                } else {
+                    error = "Option" + klopt + "requires a following value";
+                }
+            }
+
+            if (error != null) {
+                printUsage(error);
+            }
+
+            klOptions.add(klopt);
+            if (klOptions.contains(KlistOption.KEYTAB)
+                && klOptions.contains(KlistOption.CREDENTIALS_CACHE)) {
+                error = "Can not use '-c' and '-k' at the same time ";
+                printUsage(error);
+            }
+        }
+
+        if (keytabFilePath == null) {
+            keytabFilePath = "/etc/krb5.keytab";
+        }
+
+        int errNo = KlistTool.printInfo(klOptions);
+        System.exit(errNo);
+    }
+}
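
printKeytabInfo() picks its header and columns with a two-bit mask: -t sets
bit 1 (timestamps) and -K sets bit 0 (keys). A worked example:

    // outputIndex: 0 = KVNO + principal, 1 = + key, 2 = + timestamp, 3 = both
    // "klist -k -t -K keytab"  ->  outputIndex = 2 | 1 = 3  ->  header[3],
    // rows like: "2    11/15/2017 05:12:06 hdfs/host@HADOOP.COM (0x...)"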

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/pom.xml b/has/has-tool/has-server-tool/pom.xml
new file mode 100644
index 0000000..426eacf
--- /dev/null
+++ b/has/has-tool/has-server-tool/pom.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>has-tool</artifactId>
+        <groupId>org.apache.hadoop</groupId>
+        <version>1.0.0-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>has-server-tool</artifactId>
+
+    <dependencies>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>has-server</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>has-plugins</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.kerby</groupId>
+        <artifactId>kdc-tool</artifactId>
+        <version>${kerby.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.kerby</groupId>
+        <artifactId>kerb-identity</artifactId>
+        <version>${kerby.version}</version>
+      </dependency>
+    </dependencies>
+
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/HadminLocalTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/HadminLocalTool.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/HadminLocalTool.java
new file mode 100644
index 0000000..647ad4e
--- /dev/null
+++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/HadminLocalTool.java
@@ -0,0 +1,265 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.server.hadmin.local;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.admin.LocalHasAdmin;
+import org.apache.hadoop.has.tool.server.hadmin.local.cmd.AddPrincipalCmd;
+import org.apache.hadoop.has.tool.server.hadmin.local.cmd.AddPrincipalsCmd;
+import org.apache.hadoop.has.tool.server.hadmin.local.cmd.DeletePrincipalCmd;
+import org.apache.hadoop.has.tool.server.hadmin.local.cmd.DisableConfigureCmd;
+import org.apache.hadoop.has.tool.server.hadmin.local.cmd.EnableConfigureCmd;
+import org.apache.hadoop.has.tool.server.hadmin.local.cmd.ExportKeytabsCmd;
+import org.apache.hadoop.has.tool.server.hadmin.local.cmd.GetHostRolesCmd;
+import org.apache.hadoop.has.tool.server.hadmin.local.cmd.GetPrincipalCmd;
+import org.apache.hadoop.has.tool.server.hadmin.local.cmd.HadminCmd;
+import org.apache.hadoop.has.tool.server.hadmin.local.cmd.KeytabAddCmd;
+import org.apache.hadoop.has.tool.server.hadmin.local.cmd.ListPrincipalsCmd;
+import org.apache.hadoop.has.tool.server.hadmin.local.cmd.RenamePrincipalCmd;
+import org.apache.kerby.KOptions;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.KadminOption;
+import org.apache.kerby.kerberos.tool.kadmin.AuthUtil;
+import org.apache.kerby.kerberos.tool.kadmin.ToolUtil;
+import org.apache.kerby.util.OSUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosPrincipal;
+import javax.security.auth.login.LoginException;
+import java.io.File;
+import java.security.Principal;
+import java.util.Map;
+import java.util.Scanner;
+import java.util.Set;
+
+/**
+ * Modeled on the MIT kadmin command-line tool.
+ */
+public class HadminLocalTool {
+    private static final Logger LOG = LoggerFactory.getLogger(HadminLocalTool.class);
+    private static File confDir;
+
+    private static final String PROMPT = HadminLocalTool.class.getSimpleName() + ".local";
+    private static final String USAGE = (OSUtil.isWindows()
+            ? "Usage: bin\\hadmin-local.cmd" : "Usage: sh bin/hadmin-local.sh")
+            + " <conf-dir> <-c cache_name>|<-k keytab>\n"
+            + "\tExample:\n"
+            + "\t\t"
+            + (OSUtil.isWindows()
+            ? "bin\\hadmin-local.cmd" : "sh bin/hadmin-local.sh")
+            + " conf -k admin.keytab\n";
+
+    private static void printUsage(String error) {
+        System.err.println(error + "\n");
+        System.err.println(USAGE);
+        System.exit(-1);
+    }
+
+    private static final String LEGAL_COMMANDS = "Available commands are: "
+        + "\n"
+        + "add_principal, addprinc\n"
+        + "                         Add principal\n"
+        + "delete_principal, delprinc\n"
+        + "                         Delete principal\n"
+        + "rename_principal, renprinc\n"
+        + "                         Rename principal\n"
+        + "get_principal, getprinc\n"
+        + "                         Get principal\n"
+        + "list_principals, listprincs\n"
+        + "                         List principals\n"
+        + "ktadd, xst\n"
+        + "                         Add entry(s) to a keytab\n"
+        + "get_hostroles, hostroles\n"
+        + "                         Get hostRoles\n"
+        + "export_keytabs, expkeytabs\n"
+        + "                         Export keytabs\n"
+        + "create_principals, creprincs\n"
+        + "                         Create principals\n"
+        + "enable_configure, enable\n"
+        + "                         Enable configure\n"
+        + "disable_configure, disable\n"
+        + "                         Disable configure\n";
+
+    private static void execute(LocalHasAdmin hadmin, String input) throws HasException {
+        // Omit the leading and trailing whitespace.
+        input = input.trim();
+        if (input.startsWith("cmd")) {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+
+        String[] items = input.split("\\s+");
+        String cmd = items[0];
+        HadminCmd executor;
+        if (cmd.startsWith("add_principal")
+            || cmd.startsWith("addprinc")) {
+            executor = new AddPrincipalCmd(hadmin);
+        } else if (cmd.startsWith("delete_principal")
+            || cmd.startsWith("delprinc")) {
+            executor = new DeletePrincipalCmd(hadmin);
+        } else if (cmd.startsWith("rename_principal")
+            || cmd.startsWith("renprinc")) {
+            executor = new RenamePrincipalCmd(hadmin);
+        } else if (cmd.startsWith("list_principals")
+            || cmd.startsWith("listprincs")) {
+            executor = new ListPrincipalsCmd(hadmin);
+        } else if (cmd.startsWith("ktadd")
+            || cmd.startsWith("xst")) {
+            executor = new KeytabAddCmd(hadmin);
+        } else if (cmd.startsWith("get_hostroles")
+            || cmd.startsWith("hostroles")) {
+            executor = new GetHostRolesCmd(hadmin);
+        } else if (cmd.startsWith("create_principals")
+            || cmd.startsWith("creprincs")) {
+            executor = new AddPrincipalsCmd(hadmin);
+        } else if (cmd.startsWith("export_keytabs")
+            || cmd.startsWith("expkeytabs")) {
+            executor = new ExportKeytabsCmd(hadmin);
+        } else if (cmd.startsWith("enable_configure")
+            || cmd.startsWith("enable")) {
+            executor = new EnableConfigureCmd(hadmin);
+        } else if (cmd.startsWith("disable_configure")
+            || cmd.startsWith("disable")) {
+            executor = new DisableConfigureCmd(hadmin);
+        } else if (cmd.startsWith("get_principal")
+            || cmd.startsWith("getprinc")) {
+            executor = new GetPrincipalCmd(hadmin);
+        } else {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+        executor.execute(items);
+    }
+
+    private static File getConfDir(String[] args) {
+        String envDir;
+        confDir = new File(args[0]);
+        if (!confDir.exists()) {
+            try {
+                Map<String, String> mapEnv = System.getenv();
+                envDir = mapEnv.get("KRB5_KDC_DIR");
+            } catch (SecurityException e) {
+                envDir = null;
+            }
+            if (envDir != null) {
+                confDir = new File(envDir);
+            } else {
+                confDir = new File("/etc/kerby/"); // for Linux. TODO: fix for Win etc.
+            }
+
+            if (!confDir.exists()) {
+                throw new RuntimeException("Can not locate KDC backend directory "
+                        + confDir.getAbsolutePath());
+            }
+        }
+        LOG.info("Conf dir: " + confDir.getAbsolutePath());
+        return confDir;
+    }
+
+    public static void main(String[] args) {
+
+        if (args.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        LocalHasAdmin hadmin;
+        try {
+            hadmin = new LocalHasAdmin(getConfDir(args));
+        } catch (KrbException e) {
+            System.err.println("Failed to init HasAdmin due to " + e.getMessage());
+            return;
+        }
+
+        KOptions kOptions = ToolUtil.parseOptions(args, 1, args.length - 1);
+        if (kOptions == null) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        String hadminPrincipal = hadmin.getHadminPrincipal();
+        Subject subject = null;
+        if (kOptions.contains(KadminOption.CCACHE)) {
+            File ccFile = kOptions.getFileOption(KadminOption.CCACHE);
+            if (ccFile == null || !ccFile.exists()) {
+                printUsage("A valid credentials cache file is required.");
+                return;
+            }
+            try {
+                subject = AuthUtil.loginUsingTicketCache(hadminPrincipal, ccFile);
+            } catch (LoginException e) {
+                System.err.println("Could not login with " + hadminPrincipal
+                    + ": " + e.getMessage());
+                return;
+            }
+        } else if (kOptions.contains(KadminOption.K)) {
+            File keyTabFile = new File(kOptions.getStringOption(KadminOption.K));
+            if (keyTabFile == null || !keyTabFile.exists()) {
+                printUsage("A valid keytab file is required.");
+                return;
+            }
+            try {
+                subject = AuthUtil.loginUsingKeytab(hadminPrincipal, keyTabFile);
+            } catch (LoginException e) {
+                System.err.println("Could not login with " + hadminPrincipal
+                    + ": " + e.getMessage());
+                return;
+            }
+        } else {
+            printUsage("No credentials cache file or keytab file for authentication.");
+        }
+        if (subject != null) {
+            Principal adminPrincipal = new KerberosPrincipal(hadminPrincipal);
+            Set<Principal> princSet = subject.getPrincipals();
+            if (princSet == null || princSet.isEmpty()) {
+                printUsage("The principal set in the subject is empty.");
+                return;
+            }
+            if (princSet.contains(adminPrincipal)) {
+                System.out.println("Login successful for user: " + hadminPrincipal);
+            } else {
+                printUsage("Login failure for " + hadminPrincipal);
+                return;
+            }
+        } else {
+            printUsage("The subject is null, login failure for " + hadminPrincipal);
+            return;
+        }
+        System.out.println("Enter \"cmd\" to see the available commands.");
+        System.out.print(PROMPT + ": ");
+
+        try (Scanner scanner = new Scanner(System.in, "UTF-8")) {
+            String input = scanner.nextLine();
+
+            while (!(input.equals("quit") || input.equals("exit")
+                    || input.equals("q"))) {
+                try {
+                    execute(hadmin, input);
+                } catch (HasException e) {
+                    System.err.println(e.getMessage());
+                }
+                System.out.print(PROMPT + ": ");
+                input = scanner.nextLine();
+            }
+        }
+    }
+}
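
Beyond the interactive prompt, the same initialization can be done in code; a
minimal sketch, assuming a "conf" directory that holds the HAS/KDC
configuration (HadminExample is a hypothetical name):

    import org.apache.hadoop.has.server.admin.LocalHasAdmin;
    import java.io.File;

    public class HadminExample {
        public static void main(String[] args) throws Exception {
            // Same setup main() performs before entering its prompt loop.
            LocalHasAdmin hadmin = new LocalHasAdmin(new File("conf"));
            System.out.println("Admin principal: " + hadmin.getHadminPrincipal());
        }
    }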


http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/KeyStoresFactory.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/KeyStoresFactory.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/KeyStoresFactory.java
new file mode 100644
index 0000000..83ad9a1
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/KeyStoresFactory.java
@@ -0,0 +1,254 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.hadoop.has.common.ssl;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.util.StringUtils;
+import org.apache.kerby.kerberos.kerb.client.KrbConfig;
+
+import javax.net.ssl.KeyManager;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.TrustManager;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.GeneralSecurityException;
+import java.security.KeyStore;
+import java.text.MessageFormat;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ * Factory that gives access to {@link KeyManager} and {@link TrustManager}
+ * implementations.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class KeyStoresFactory extends KrbConfig {
+
+  private static final Log LOG =
+    LogFactory.getLog(KeyStoresFactory.class);
+
+  public static final String SSL_KEYSTORE_LOCATION_TPL_KEY =
+    "ssl.{0}.keystore.location";
+  public static final String SSL_KEYSTORE_PASSWORD_TPL_KEY =
+    "ssl.{0}.keystore.password";
+  public static final String SSL_KEYSTORE_KEYPASSWORD_TPL_KEY =
+    "ssl.{0}.keystore.keypassword";
+  public static final String SSL_KEYSTORE_TYPE_TPL_KEY =
+    "ssl.{0}.keystore.type";
+
+  public static final String SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY =
+    "ssl.{0}.truststore.reload.interval";
+  public static final String SSL_TRUSTSTORE_LOCATION_TPL_KEY =
+    "ssl.{0}.truststore.location";
+  public static final String SSL_TRUSTSTORE_PASSWORD_TPL_KEY =
+    "ssl.{0}.truststore.password";
+  public static final String SSL_TRUSTSTORE_TYPE_TPL_KEY =
+    "ssl.{0}.truststore.type";
+
+  /**
+   * Default format of the keystore files.
+   */
+  public static final String DEFAULT_KEYSTORE_TYPE = "jks";
+
+  /**
+   * Reload interval in milliseconds.
+   */
+  public static final long DEFAULT_SSL_TRUSTSTORE_RELOAD_INTERVAL = 10000;
+
+  private HasConfig conf;
+  private KeyManager[] keyManagers;
+  private TrustManager[] trustManagers;
+  private ReloadingX509TrustManager trustManager;
+
+  /**
+   * Sets the configuration for the factory.
+   *
+   * @param conf the configuration for the factory.
+   */
+  public void setConf(HasConfig conf) {
+    this.conf = conf;
+  }
+
+  /**
+   * Returns the configuration of the factory.
+   *
+   * @return the configuration of the factory.
+   */
+  public HasConfig getConf() {
+    return conf;
+  }
+
+
+  /**
+   * Initializes the keystores of the factory.
+   *
+   * @param mode if the keystores are to be used in client or server mode.
+   * @throws IOException thrown if the keystores could not be initialized due
+   * to an IO error.
+   * @throws GeneralSecurityException thrown if the keystores could not be
+   * initialized due to a security error.
+   */
+  public void init(SSLFactory.Mode mode) throws IOException, GeneralSecurityException {
+     boolean requireClientCert =
+      conf.getBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY,
+          SSLFactory.DEFAULT_SSL_REQUIRE_CLIENT_CERT);
+
+    // certificate store
+    String keystoreType =
+      conf.getString(resolvePropertyName(mode, SSL_KEYSTORE_TYPE_TPL_KEY),
+               DEFAULT_KEYSTORE_TYPE);
+    KeyStore keystore = KeyStore.getInstance(keystoreType);
+    String keystoreKeyPassword = null;
+    if (requireClientCert || mode == SSLFactory.Mode.SERVER) {
+      String locationProperty =
+        resolvePropertyName(mode, SSL_KEYSTORE_LOCATION_TPL_KEY);
+      String keystoreLocation = conf.getString(locationProperty, "");
+      if (keystoreLocation.isEmpty()) {
+        throw new GeneralSecurityException("The property '" + locationProperty
+            + "' has not been set in the ssl configuration file.");
+      }
+      String passwordProperty =
+        resolvePropertyName(mode, SSL_KEYSTORE_PASSWORD_TPL_KEY);
+      String keystorePassword = getPassword(conf, passwordProperty, "");
+      if (keystorePassword.isEmpty()) {
+        throw new GeneralSecurityException("The property '" + passwordProperty
+            + "' has not been set in the ssl configuration file.");
+      }
+      String keyPasswordProperty =
+        resolvePropertyName(mode, SSL_KEYSTORE_KEYPASSWORD_TPL_KEY);
+      // Key password defaults to the same value as store password for
+      // compatibility with legacy configurations that did not use a separate
+      // configuration property for key password.
+      keystoreKeyPassword = getPassword(
+          conf, keyPasswordProperty, keystorePassword);
+      LOG.debug(mode.toString() + " KeyStore: " + keystoreLocation);
+
+      InputStream is = new FileInputStream(keystoreLocation);
+      try {
+        keystore.load(is, keystorePassword.toCharArray());
+      } finally {
+        is.close();
+      }
+      LOG.debug(mode.toString() + " Loaded KeyStore: " + keystoreLocation);
+    } else {
+      keystore.load(null, null);
+    }
+    KeyManagerFactory keyMgrFactory = KeyManagerFactory
+        .getInstance(SSLFactory.SSLCERTIFICATE);
+
+    keyMgrFactory.init(keystore, (keystoreKeyPassword != null)
+        ? keystoreKeyPassword.toCharArray() : null);
+    keyManagers = keyMgrFactory.getKeyManagers();
+
+    //trust store
+    String truststoreType =
+      conf.getString(resolvePropertyName(mode, SSL_TRUSTSTORE_TYPE_TPL_KEY),
+               DEFAULT_KEYSTORE_TYPE);
+
+    String locationProperty =
+      resolvePropertyName(mode, SSL_TRUSTSTORE_LOCATION_TPL_KEY);
+    String truststoreLocation = conf.getString(locationProperty, "");
+    if (!truststoreLocation.isEmpty()) {
+      String passwordProperty = resolvePropertyName(mode,
+          SSL_TRUSTSTORE_PASSWORD_TPL_KEY);
+      String truststorePassword = getPassword(conf, passwordProperty, "");
+      if (truststorePassword.isEmpty()) {
+        throw new GeneralSecurityException("The property '" + passwordProperty
+            + "' has not been set in the ssl configuration file.");
+      }
+      long truststoreReloadInterval =
+          conf.getLong(resolvePropertyName(mode, SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY),
+              DEFAULT_SSL_TRUSTSTORE_RELOAD_INTERVAL);
+
+      LOG.debug(mode.toString() + " TrustStore: " + truststoreLocation);
+
+      trustManager = new ReloadingX509TrustManager(truststoreType,
+          truststoreLocation,
+          truststorePassword,
+          truststoreReloadInterval);
+      trustManager.init();
+      LOG.debug(mode.toString() + " Loaded TrustStore: " + truststoreLocation);
+      trustManagers = new TrustManager[]{trustManager};
+    } else {
+      LOG.debug("The property '" + locationProperty + "' has not been set, "
+          + "no TrustStore will be loaded");
+      trustManagers = null;
+    }
+  }
+
+  String getPassword(HasConfig conf, String alias, String defaultPass) {
+    // Fall back to the default only when no value is configured for the alias.
+    String password = conf.getString(alias);
+    if (password == null) {
+      password = defaultPass;
+    }
+    return password;
+  }
+
+  /**
+   * Releases any resources being used.
+   */
+  public void destroy() {
+    if (trustManager != null) {
+      trustManager.destroy();
+      trustManager = null;
+      keyManagers = null;
+      trustManagers = null;
+    }
+  }
+
+  /**
+   * Returns the keymanagers for owned certificates.
+   *
+   * @return the keymanagers for owned certificates.
+   */
+  public KeyManager[] getKeyManagers() {
+    return keyManagers;
+  }
+
+  /**
+   * Returns the trustmanagers for trusted certificates.
+   *
+   * @return the trustmanagers for trusted certificates.
+   */
+  public TrustManager[] getTrustManagers() {
+    return trustManagers;
+  }
+
+  /**
+   * Resolves a property name to its client/server version if applicable.
+   * <p/>
+   * NOTE: This method is public for testing purposes.
+   *
+   * @param mode client/server mode.
+   * @param template property name template.
+   * @return the resolved property name.
+   */
+  @VisibleForTesting
+  public static String resolvePropertyName(SSLFactory.Mode mode,
+                                           String template) {
+    return MessageFormat.format(
+        template, StringUtils.toLowerCase(mode.toString()));
+  }
+}
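
A hedged sketch of using the factory directly (SSLFactory normally drives it);
the ini file name is an assumption, and it must define the
ssl.client.truststore.* properties resolved above:

    // Assumes imports from org.apache.hadoop.has.common(.ssl) and javax.net.ssl.
    HasConfig sslConf = new HasConfig();
    sslConf.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, false);
    sslConf.addIniConfig(new File("ssl-client.conf")); // assumed file name
    KeyStoresFactory keyStores = new KeyStoresFactory();
    keyStores.setConf(sslConf);
    keyStores.init(SSLFactory.Mode.CLIENT);  // loads the trust/key stores
    TrustManager[] trustManagers = keyStores.getTrustManagers();
    KeyManager[] keyManagers = keyStores.getKeyManagers();
    // ... build an SSLContext from the managers ...
    keyStores.destroy();                     // stops the truststore reloader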

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/ReloadingX509TrustManager.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/ReloadingX509TrustManager.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/ReloadingX509TrustManager.java
new file mode 100644
index 0000000..2aa2e6c
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/ReloadingX509TrustManager.java
@@ -0,0 +1,208 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.has.common.ssl;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import javax.net.ssl.TrustManager;
+import javax.net.ssl.TrustManagerFactory;
+import javax.net.ssl.X509TrustManager;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.security.GeneralSecurityException;
+import java.security.KeyStore;
+import java.security.cert.CertificateException;
+import java.security.cert.X509Certificate;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ * A {@link TrustManager} implementation that reloads its configuration when
+ * the truststore file on disk changes.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public final class ReloadingX509TrustManager
+  implements X509TrustManager, Runnable {
+
+  private static final Log LOG =
+    LogFactory.getLog(ReloadingX509TrustManager.class);
+
+  private String type;
+  private File file;
+  private String password;
+  private long lastLoaded;
+  private long reloadInterval;
+  private AtomicReference<X509TrustManager> trustManagerRef;
+
+  private volatile boolean running;
+  private Thread reloader;
+
+  /**
+   * Creates a reloadable trustmanager. The trustmanager reloads itself
+   * if the underlying truststore file has changed.
+   *
+   * @param type type of truststore file, typically 'jks'.
+   * @param location local path to the truststore file.
+   * @param password password of the truststore file.
+   * @param reloadInterval interval to check if the truststore file has
+   * changed, in milliseconds.
+   * @throws IOException thrown if the truststore could not be initialized due
+   * to an IO error.
+   * @throws GeneralSecurityException thrown if the truststore could not be
+   * initialized due to a security error.
+   */
+  public ReloadingX509TrustManager(String type, String location,
+                                   String password, long reloadInterval)
+    throws IOException, GeneralSecurityException {
+    this.type = type;
+    file = new File(location);
+    this.password = password;
+    trustManagerRef = new AtomicReference<X509TrustManager>();
+    trustManagerRef.set(loadTrustManager());
+    this.reloadInterval = reloadInterval;
+  }
+
+  /**
+   * Starts the reloader thread.
+   */
+  public void init() {
+    reloader = new Thread(this, "Truststore reloader thread");
+    reloader.setDaemon(true);
+    running = true;
+    reloader.start();
+  }
+
+  /**
+   * Stops the reloader thread.
+   */
+  public void destroy() {
+    running = false;
+    reloader.interrupt();
+  }
+
+  /**
+   * Returns the reload check interval.
+   *
+   * @return the reload check interval, in milliseconds.
+   */
+  public long getReloadInterval() {
+    return reloadInterval;
+  }
+
+  @Override
+  public void checkClientTrusted(X509Certificate[] chain, String authType)
+    throws CertificateException {
+    X509TrustManager tm = trustManagerRef.get();
+    if (tm != null) {
+      tm.checkClientTrusted(chain, authType);
+    } else {
+      throw new CertificateException("Unknown client chain certificate: "
+          + chain[0].toString());
+    }
+  }
+
+  @Override
+  public void checkServerTrusted(X509Certificate[] chain, String authType)
+    throws CertificateException {
+    X509TrustManager tm = trustManagerRef.get();
+    if (tm != null) {
+      tm.checkServerTrusted(chain, authType);
+    } else {
+      throw new CertificateException("Unknown server chain certificate: "
+          + chain[0].toString());
+    }
+  }
+
+  private static final X509Certificate[] EMPTY = new X509Certificate[0];
+  @Override
+  public X509Certificate[] getAcceptedIssuers() {
+    X509Certificate[] issuers = EMPTY;
+    X509TrustManager tm = trustManagerRef.get();
+    if (tm != null) {
+      issuers = tm.getAcceptedIssuers();
+    }
+    return issuers;
+  }
+
+  boolean needsReload() {
+    boolean reload = true;
+    if (file.exists()) {
+      if (file.lastModified() == lastLoaded) {
+        reload = false;
+      }
+    } else {
+      lastLoaded = 0;
+    }
+    return reload;
+  }
+
+  X509TrustManager loadTrustManager()
+  throws IOException, GeneralSecurityException {
+    X509TrustManager trustManager = null;
+    KeyStore ks = KeyStore.getInstance(type);
+    lastLoaded = file.lastModified();
+    FileInputStream in = new FileInputStream(file);
+    try {
+      ks.load(in, password.toCharArray());
+      LOG.debug("Loaded truststore '" + file + "'");
+    } finally {
+      in.close();
+    }
+
+    TrustManagerFactory trustManagerFactory = 
+      TrustManagerFactory.getInstance(SSLFactory.SSLCERTIFICATE);
+    trustManagerFactory.init(ks);
+    TrustManager[] trustManagers = trustManagerFactory.getTrustManagers();
+    for (TrustManager trustManager1 : trustManagers) {
+      if (trustManager1 instanceof X509TrustManager) {
+        trustManager = (X509TrustManager) trustManager1;
+        break;
+      }
+    }
+    return trustManager;
+  }
+
+  @Override
+  public void run() {
+    while (running) {
+      try {
+        Thread.sleep(reloadInterval);
+      } catch (InterruptedException e) {
+        //NOP
+      }
+      if (running && needsReload()) {
+        try {
+          trustManagerRef.set(loadTrustManager());
+        } catch (Exception ex) {
+          LOG.warn("Could not load truststore (keep using existing one) : "
+              + ex.toString(), ex);
+        }
+      }
+    }
+  }
+
+}
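
A minimal sketch of wiring this trust manager into a plain SSLContext; the
truststore path and password are assumptions:

    // Assumes imports from org.apache.hadoop.has.common.ssl and javax.net.ssl.
    ReloadingX509TrustManager tm = new ReloadingX509TrustManager(
            "jks", "/etc/has/truststore.jks", "changeit", 10000);
    tm.init();  // starts the daemon reloader thread
    SSLContext context = SSLContext.getInstance("TLS");
    context.init(null, new TrustManager[]{tm}, null);
    // ... use context.getSocketFactory() for connections ...
    tm.destroy();  // stops the reloader when done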

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLFactory.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLFactory.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLFactory.java
new file mode 100644
index 0000000..dcb5140
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLFactory.java
@@ -0,0 +1,290 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.hadoop.has.common.ssl;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.ConnectionConfigurator;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.util.StringUtils;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLEngine;
+import javax.net.ssl.SSLServerSocketFactory;
+import javax.net.ssl.SSLSocketFactory;
+import java.io.File;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.security.GeneralSecurityException;
+
+import static org.apache.hadoop.has.common.util.PlatformName.IBM_JAVA;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ * Factory that creates SSLEngine and SSLSocketFactory instances using
+ * Hadoop configuration information.
+ * <p/>
+ * It uses a ReloadingX509TrustManager instance, which reloads public keys
+ * if the truststore file changes.
+ * <p/>
+ * This factory is used to configure HTTPS in Hadoop HTTP based endpoints, both
+ * client and server.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class SSLFactory implements ConnectionConfigurator {
+
+  @InterfaceAudience.Private
+  public enum Mode {
+    CLIENT, SERVER
+  }
+
+  public static final String SSL_REQUIRE_CLIENT_CERT_KEY =
+    "hadoop.ssl.require.client.cert";
+  public static final String SSL_HOSTNAME_VERIFIER_KEY =
+    "hadoop.ssl.hostname.verifier";
+  public static final String SSL_CLIENT_CONF_KEY =
+    "hadoop.ssl.client.conf";
+  public static final String SSL_SERVER_CONF_KEY =
+      "hadoop.ssl.server.conf";
+  public static final String SSLCERTIFICATE = IBM_JAVA ? "ibmX509" : "SunX509";
+
+  public static final boolean DEFAULT_SSL_REQUIRE_CLIENT_CERT = false;
+
+  public static final String KEYSTORES_FACTORY_CLASS_KEY =
+    "hadoop.ssl.keystores.factory.class";
+
+  public static final String SSL_ENABLED_PROTOCOLS =
+      "hadoop.ssl.enabled.protocols";
+  public static final String DEFAULT_SSL_ENABLED_PROTOCOLS = "TLSv1";
+
+  private HasConfig conf;
+  private Mode mode;
+  private boolean requireClientCert;
+  private SSLContext context;
+  private HostnameVerifier hostnameVerifier;
+  private KeyStoresFactory keystoresFactory;
+
+  private String[] enabledProtocols = null;
+
+  /**
+   * Creates an SSLFactory.
+   *
+   * @param mode SSLFactory mode, client or server.
+   * @param conf Hadoop configuration from where the SSLFactory configuration
+   * will be read.
+   */
+  public SSLFactory(Mode mode, HasConfig conf) throws HasException {
+    this.conf = conf;
+    if (mode == null) {
+      throw new IllegalArgumentException("mode cannot be NULL");
+    }
+    this.mode = mode;
+    requireClientCert = conf.getBoolean(SSL_REQUIRE_CLIENT_CERT_KEY,
+                                        DEFAULT_SSL_REQUIRE_CLIENT_CERT);
+    HasConfig sslConf = readSSLConfiguration(mode);
+
+    keystoresFactory = new KeyStoresFactory();
+    keystoresFactory.setConf(sslConf);
+
+    enabledProtocols = new String[] {DEFAULT_SSL_ENABLED_PROTOCOLS};
+  }
+
+  private HasConfig readSSLConfiguration(Mode mode) throws HasException {
+    HasConfig sslConf = new HasConfig();
+    sslConf.setBoolean(SSL_REQUIRE_CLIENT_CERT_KEY, requireClientCert);
+    String sslConfResource;
+    if (mode == Mode.CLIENT) {
+      sslConfResource = conf.getString(SSLFactory.SSL_CLIENT_CONF_KEY);
+    } else {
+      sslConfResource = conf.getString(SSLFactory.SSL_SERVER_CONF_KEY);
+    }
+    try {
+      sslConf.addIniConfig(new File(sslConfResource));
+    } catch (IOException e) {
+      throw new HasException(e);
+    }
+    return sslConf;
+  }
+
+  /**
+   * Initializes the factory.
+   *
+   * @throws  GeneralSecurityException thrown if an SSL initialization error
+   * happened.
+   * @throws IOException thrown if an IO error happened while reading the SSL
+   * configuration.
+   */
+  public void init() throws GeneralSecurityException, IOException {
+    keystoresFactory.init(mode);
+    context = SSLContext.getInstance("TLS");
+    context.init(keystoresFactory.getKeyManagers(),
+                 keystoresFactory.getTrustManagers(), null);
+    context.getDefaultSSLParameters().setProtocols(enabledProtocols);
+    hostnameVerifier = getHostnameVerifier(conf);
+  }
+
+  private HostnameVerifier getHostnameVerifier(HasConfig conf)
+      throws GeneralSecurityException, IOException {
+    return getHostnameVerifier(StringUtils.toUpperCase(
+        conf.getString(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").trim()));
+  }
+
+  public static HostnameVerifier getHostnameVerifier(String verifier)
+    throws GeneralSecurityException, IOException {
+    HostnameVerifier hostnameVerifier;
+    if (verifier.equals("DEFAULT")) {
+      hostnameVerifier = SSLHostnameVerifier.DEFAULT;
+    } else if (verifier.equals("DEFAULT_AND_LOCALHOST")) {
+      hostnameVerifier = SSLHostnameVerifier.DEFAULT_AND_LOCALHOST;
+    } else if (verifier.equals("STRICT")) {
+      hostnameVerifier = SSLHostnameVerifier.STRICT;
+    } else if (verifier.equals("STRICT_IE6")) {
+      hostnameVerifier = SSLHostnameVerifier.STRICT_IE6;
+    } else if (verifier.equals("ALLOW_ALL")) {
+      hostnameVerifier = SSLHostnameVerifier.ALLOW_ALL;
+    } else {
+      throw new GeneralSecurityException("Invalid hostname verifier: "
+          + verifier);
+    }
+    return hostnameVerifier;
+  }
+
+  /**
+   * Releases any resources being used.
+   */
+  public void destroy() {
+    keystoresFactory.destroy();
+  }
+  /**
+   * Returns the SSLFactory KeyStoresFactory instance.
+   *
+   * @return the SSLFactory KeyStoresFactory instance.
+   */
+  public KeyStoresFactory getKeystoresFactory() {
+    return keystoresFactory;
+  }
+
+  /**
+   * Returns a configured SSLEngine.
+   *
+   * @return the configured SSLEngine.
+   * @throws GeneralSecurityException thrown if the SSL engine could not
+   * be initialized.
+   * @throws IOException thrown if an IO error occurred while loading
+   * the server keystore.
+   */
+  public SSLEngine createSSLEngine()
+    throws GeneralSecurityException, IOException {
+    SSLEngine sslEngine = context.createSSLEngine();
+    if (mode == Mode.CLIENT) {
+      sslEngine.setUseClientMode(true);
+    } else {
+      sslEngine.setUseClientMode(false);
+      sslEngine.setNeedClientAuth(requireClientCert);
+    }
+    sslEngine.setEnabledProtocols(enabledProtocols);
+    return sslEngine;
+  }
+
+  /**
+   * Returns a configured SSLServerSocketFactory.
+   *
+   * @return the configured SSLSocketFactory.
+   * @throws GeneralSecurityException thrown if the SSLSocketFactory could not
+   * be initialized.
+   * @throws IOException thrown if an IO error occurred while loading
+   * the server keystore.
+   */
+  public SSLServerSocketFactory createSSLServerSocketFactory()
+    throws GeneralSecurityException, IOException {
+    if (mode != Mode.SERVER) {
+      throw new IllegalStateException("Factory is in CLIENT mode");
+    }
+    return context.getServerSocketFactory();
+  }
+
+  /**
+   * Returns a configured SSLSocketFactory.
+   *
+   * @return the configured SSLSocketFactory.
+   * @throws GeneralSecurityException thrown if the SSLSocketFactory could not
+   * be initialized.
+   * @throws IOException thrown if an IO error occurred while loading
+   * the server keystore.
+   */
+  public SSLSocketFactory createSSLSocketFactory()
+    throws GeneralSecurityException, IOException {
+    if (mode != Mode.CLIENT) {
+      throw new IllegalStateException("Factory is in SERVER mode");
+    }
+    return context.getSocketFactory();
+  }
+
+  /**
+   * Returns the hostname verifier to be used in HttpsURLConnections.
+   *
+   * @return the hostname verifier.
+   */
+  public HostnameVerifier getHostnameVerifier() {
+    if (mode != Mode.CLIENT) {
+      throw new IllegalStateException("Factory is in SERVER mode");
+    }
+    return hostnameVerifier;
+  }
+
+  /**
+   * Returns whether client certificates are required.
+   *
+   * @return true if client certificates are required, false otherwise.
+   */
+  public boolean isClientCertRequired() {
+    return requireClientCert;
+  }
+
+  /**
+   * If the given {@link HttpURLConnection} is an {@link HttpsURLConnection}
+   * configures the connection with the {@link SSLSocketFactory} and
+   * {@link HostnameVerifier} of this SSLFactory, otherwise does nothing.
+   *
+   * @param conn the {@link HttpURLConnection} instance to configure.
+   * @return the configured {@link HttpURLConnection} instance.
+   *
+   * @throws IOException if an IO error occurred.
+   */
+  @Override
+  public HttpURLConnection configure(HttpURLConnection conn)
+    throws IOException {
+    if (conn instanceof HttpsURLConnection) {
+      HttpsURLConnection sslConn = (HttpsURLConnection) conn;
+      try {
+        sslConn.setSSLSocketFactory(createSSLSocketFactory());
+      } catch (GeneralSecurityException ex) {
+        throw new IOException(ex);
+      }
+      sslConn.setHostnameVerifier(getHostnameVerifier());
+      conn = sslConn;
+    }
+    return conn;
+  }
+}
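
A hedged client-side usage sketch; the configuration file name and URL are
assumptions, and the ini file must map hadoop.ssl.client.conf to the SSL
client configuration read in readSSLConfiguration():

    // Assumes imports from org.apache.hadoop.has.common(.ssl) and java.net.
    HasConfig conf = new HasConfig();
    conf.addIniConfig(new File("has-client.conf")); // assumed; supplies hadoop.ssl.client.conf
    SSLFactory factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
    factory.init();
    HttpURLConnection conn = (HttpURLConnection)
            new URL("https://has-server:443/").openConnection();
    conn = factory.configure(conn); // installs socket factory and hostname verifier for HTTPS
    // ... issue the request ...
    factory.destroy();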

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLHostnameVerifier.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLHostnameVerifier.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLHostnameVerifier.java
new file mode 100644
index 0000000..86d6734
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/ssl/SSLHostnameVerifier.java
@@ -0,0 +1,615 @@
+/*
+ * $HeadURL$
+ * $Revision$
+ * $Date$
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ *
+ * This software consists of voluntary contributions made by many
+ * individuals on behalf of the Apache Software Foundation.  For more
+ * information on the Apache Software Foundation, please see
+ * <http://www.apache.org/>.
+ *
+ */
+
+package org.apache.hadoop.has.common.ssl;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.cert.Certificate;
+import java.security.cert.CertificateParsingException;
+import java.security.cert.X509Certificate;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+import java.util.StringTokenizer;
+import java.util.TreeSet;
+
+import javax.net.ssl.SSLException;
+import javax.net.ssl.SSLPeerUnverifiedException;
+import javax.net.ssl.SSLSession;
+import javax.net.ssl.SSLSocket;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.has.common.util.StringUtils;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ ************************************************************************
+ * Copied from the not-yet-commons-ssl project at
+ * http://juliusdavies.ca/commons-ssl/
+ * This project is not yet in Apache, but it is Apache 2.0 licensed.
+ ************************************************************************
+ * Interface for checking if a hostname matches the names stored inside the
+ * server's X.509 certificate.  Correctly implements
+ * javax.net.ssl.HostnameVerifier, but that interface is not recommended.
+ * Instead we added several check() methods that take SSLSocket,
+ * or X509Certificate, or ultimately (they all end up calling this one),
+ * String.  (It's easier to supply JUnit with Strings instead of mock
+ * SSLSession objects!)
+ * </p><p>Our check() methods throw exceptions if the name is
+ * invalid, whereas javax.net.ssl.HostnameVerifier just returns true/false.
+ * <p/>
+ * We provide the HostnameVerifier.DEFAULT, HostnameVerifier.STRICT, and
+ * HostnameVerifier.ALLOW_ALL implementations.  We also provide the more
+ * specialized HostnameVerifier.DEFAULT_AND_LOCALHOST, as well as
+ * HostnameVerifier.STRICT_IE6.  But feel free to define your own
+ * implementations!
+ * <p/>
+ * Inspired by Sebastian Hauer's original StrictSSLProtocolSocketFactory in the
+ * HttpClient "contrib" repository.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
+
+    @Override
+    boolean verify(String host, SSLSession session);
+
+    void check(String host, SSLSocket ssl) throws IOException;
+
+    void check(String host, X509Certificate cert) throws SSLException;
+
+    void check(String host, String[] cns, String[] subjectAlts)
+        throws SSLException;
+
+    void check(String[] hosts, SSLSocket ssl) throws IOException;
+
+    void check(String[] hosts, X509Certificate cert) throws SSLException;
+
+
+    /**
+     * Checks to see if the supplied hostname matches any of the supplied CNs
+     * or "DNS" Subject-Alts.  Most implementations only look at the first CN,
+     * and ignore any additional CNs.  Most implementations do look at all of
+     * the "DNS" Subject-Alts. The CNs or Subject-Alts may contain wildcards
+     * according to RFC 2818.
+     *
+     * @param cns         CN fields, in order, as extracted from the X.509
+     *                    certificate.
+     * @param subjectAlts Subject-Alt fields of type 2 ("DNS"), as extracted
+     *                    from the X.509 certificate.
+     * @param hosts       The array of hostnames to verify.
+     * @throws SSLException If verification failed.
+     */
+    void check(String[] hosts, String[] cns, String[] subjectAlts)
+        throws SSLException;
+
+
+    /**
+     * The DEFAULT HostnameVerifier works the same way as Curl and Firefox.
+     * <p/>
+     * The hostname must match either the first CN, or any of the subject-alts.
+     * A wildcard can occur in the CN, and in any of the subject-alts.
+     * <p/>
+     * The only difference between DEFAULT and STRICT is that a wildcard (such
+     * as "*.foo.com") with DEFAULT matches all subdomains, including
+     * "a.b.foo.com".
+     */
+    SSLHostnameVerifier DEFAULT =
+        new AbstractVerifier() {
+            @Override
+            public final void check(final String[] hosts, final String[] cns,
+                                    final String[] subjectAlts)
+                throws SSLException {
+                check(hosts, cns, subjectAlts, false, false);
+            }
+
+            @Override
+            public final String toString() {
+                return "DEFAULT";
+            }
+        };
+
+
+    /**
+     * The DEFAULT_AND_LOCALHOST HostnameVerifier works like the DEFAULT
+     * one with one additional relaxation:  a host of "localhost",
+     * "localhost.localdomain", "127.0.0.1", "::1" will always pass, no matter
+     * what is in the server's certificate.
+     */
+    SSLHostnameVerifier DEFAULT_AND_LOCALHOST =
+        new AbstractVerifier() {
+            @Override
+            public final void check(final String[] hosts, final String[] cns,
+                                    final String[] subjectAlts)
+                throws SSLException {
+                if (isLocalhost(hosts[0])) {
+                    return;
+                }
+                check(hosts, cns, subjectAlts, false, false);
+            }
+
+            @Override
+            public final String toString() {
+                return "DEFAULT_AND_LOCALHOST";
+            }
+        };
+
+    /**
+     * The STRICT HostnameVerifier works the same way as java.net.URL in Sun
+     * Java 1.4, Sun Java 5, Sun Java 6.  It's also pretty close to IE6.
+     * This implementation appears to be compliant with RFC 2818 for dealing
+     * with wildcards.
+     * <p/>
+     * The hostname must match either the first CN, or any of the subject-alts.
+     * A wildcard can occur in the CN, and in any of the subject-alts.  The
+     * one divergence from IE6 is how we only check the first CN.  IE6 allows
+     * a match against any of the CNs present.  We decided to follow in
+     * Sun Java 1.4's footsteps and only check the first CN.
+     * <p/>
+     * A wildcard such as "*.foo.com" matches only subdomains in the same
+     * level, for example "a.foo.com".  It does not match deeper subdomains
+     * such as "a.b.foo.com".
+     */
+    SSLHostnameVerifier STRICT =
+        new AbstractVerifier() {
+            @Override
+            public final void check(final String[] host, final String[] cns,
+                                    final String[] subjectAlts)
+                throws SSLException {
+                check(host, cns, subjectAlts, false, true);
+            }
+
+            @Override
+            public final String toString() {
+                return "STRICT";
+            }
+        };
+
+    /**
+     * The STRICT_IE6 HostnameVerifier works just like the STRICT one with one
+     * minor variation:  the hostname can match against any of the CN's in the
+     * server's certificate, not just the first one.  This behaviour is
+     * identical to IE6's behaviour.
+     */
+    SSLHostnameVerifier STRICT_IE6 =
+        new AbstractVerifier() {
+            @Override
+            public final void check(final String[] host, final String[] cns,
+                                    final String[] subjectAlts)
+                throws SSLException {
+                check(host, cns, subjectAlts, true, true);
+            }
+
+            @Override
+            public final String toString() {
+                return "STRICT_IE6";
+            }
+        };
+
+    /**
+     * The ALLOW_ALL HostnameVerifier essentially turns hostname verification
+     * off.  This implementation is a no-op, and never throws the SSLException.
+     */
+    SSLHostnameVerifier ALLOW_ALL =
+        new AbstractVerifier() {
+            @Override
+            public final void check(final String[] host, final String[] cns,
+                                    final String[] subjectAlts) {
+                // Allow everything - so never blowup.
+            }
+
+            @Override
+            public final String toString() {
+                return "ALLOW_ALL";
+            }
+        };
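+
+    // Illustrative note (an editorial addition, not part of the borrowed
+    // Hadoop source): callers usually obtain one of the verifiers above by
+    // name, e.g. SSLFactory.getHostnameVerifier("STRICT"), and then call
+    // verify(host, sslSession). Per the javadoc above, DEFAULT lets
+    // "*.foo.com" match "a.b.foo.com", while STRICT restricts the wildcard
+    // to a single label such as "a.foo.com".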
+
+    abstract class AbstractVerifier implements SSLHostnameVerifier {
+
+        /**
+         * This contains a list of 2nd-level domains that aren't allowed to
+         * have wildcards when combined with country-codes.
+         * For example: [*.co.uk].
+         * <p/>
+         * The [*.co.uk] problem is an interesting one.  Should we just hope
+         * that CA's would never foolishly allow such a certificate to happen?
+         * Looks like we're the only implementation guarding against this.
+         * Firefox, Curl, Sun Java 1.4, 5, 6 don't bother with this check.
+         */
+        private static final String[] BAD_COUNTRY_2LDS =
+            {"ac", "co", "com", "ed", "edu", "go", "gouv", "gov", "info",
+                "lg", "ne", "net", "or", "org"};
+
+        private static final String[] LOCALHOSTS = {"::1", "127.0.0.1",
+            "localhost",
+            "localhost.localdomain"};
+
+
+        static {
+            // Just in case developer forgot to manually sort the array.  :-)
+            Arrays.sort(BAD_COUNTRY_2LDS);
+            Arrays.sort(LOCALHOSTS);
+        }
+
+        protected AbstractVerifier() {
+        }
+
+        /**
+         * The javax.net.ssl.HostnameVerifier contract.
+         *
+         * @param host    'hostname' we used to create our socket
+         * @param session SSLSession with the remote server
+         * @return true if the host matched the one in the certificate.
+         */
+        @Override
+        public boolean verify(String host, SSLSession session) {
+            try {
+                Certificate[] certs = session.getPeerCertificates();
+                X509Certificate x509 = (X509Certificate) certs[0];
+                check(new String[]{host}, x509);
+                return true;
+            } catch (SSLException e) {
+                return false;
+            }
+        }
+
+        @Override
+        public void check(String host, SSLSocket ssl) throws IOException {
+            check(new String[]{host}, ssl);
+        }
+
+        @Override
+        public void check(String host, X509Certificate cert)
+            throws SSLException {
+            check(new String[]{host}, cert);
+        }
+
+        @Override
+        public void check(String host, String[] cns, String[] subjectAlts)
+            throws SSLException {
+            check(new String[]{host}, cns, subjectAlts);
+        }
+
+        @Override
+        public void check(String[] host, SSLSocket ssl)
+            throws IOException {
+            if (host == null) {
+                throw new NullPointerException("host to verify is null");
+            }
+
+            SSLSession session = ssl.getSession();
+            if (session == null) {
+                // In our experience this only happens under IBM 1.4.x when
+                // spurious (unrelated) certificates show up in the server's
+                // chain.  Hopefully this will unearth the real problem:
+                InputStream in = ssl.getInputStream();
+                in.available();
+                /*
+                  If you're looking at the 2 lines of code above because
+                  you're running into a problem, you probably have two
+                  options:
+
+                    #1.  Clean up the certificate chain that your server
+                         is presenting (e.g. edit "/etc/apache2/server.crt"
+                         or wherever it is your server's certificate chain
+                         is defined).
+
+                                               OR
+
+                    #2.   Upgrade to an IBM 1.5.x or greater JVM, or switch
+                          to a non-IBM JVM.
+                */
+
+                // If ssl.getInputStream().available() didn't cause an
+                // exception, maybe at least now the session is available?
+                session = ssl.getSession();
+                if (session == null) {
+                    // If it's still null, probably a startHandshake() will
+                    // unearth the real problem.
+                    ssl.startHandshake();
+
+                    // Okay, if we still haven't managed to cause an exception,
+                    // might as well go for the NPE.  Or maybe we're okay now?
+                    session = ssl.getSession();
+                }
+            }
+            Certificate[] certs;
+            try {
+                certs = session.getPeerCertificates();
+            } catch (SSLPeerUnverifiedException spue) {
+                InputStream in = ssl.getInputStream();
+                in.available();
+                // Didn't trigger anything interesting?  Okay, just throw
+                // original.
+                throw spue;
+            }
+            X509Certificate x509 = (X509Certificate) certs[0];
+            check(host, x509);
+        }
+
+        @Override
+        public void check(String[] host, X509Certificate cert)
+            throws SSLException {
+            String[] cns = Certificates.getCNs(cert);
+            String[] subjectAlts = Certificates.getDNSSubjectAlts(cert);
+            check(host, cns, subjectAlts);
+        }
+
+        public void check(final String[] hosts, final String[] cns,
+                          final String[] subjectAlts, final boolean ie6,
+                          final boolean strictWithSubDomains)
+            throws SSLException {
+            // Build up lists of allowed hosts For logging/debugging purposes.
+            StringBuffer buf = new StringBuffer(32);
+            buf.append('<');
+            for (int i = 0; i < hosts.length; i++) {
+                String h = hosts[i];
+                h = h != null ? StringUtils.toLowerCase(h.trim()) : "";
+                hosts[i] = h;
+                if (i > 0) {
+                    buf.append('/');
+                }
+                buf.append(h);
+            }
+            buf.append('>');
+            String hostnames = buf.toString();
+            // Build the list of names we're going to check.  Our DEFAULT and
+            // STRICT implementations of the HostnameVerifier only use the
+            // first CN provided.  All other CNs are ignored.
+            // (Firefox, wget, curl, Sun Java 1.4, 5, 6 all work this way).
+            final Set<String> names = new TreeSet<String>();
+            if (cns != null && cns.length > 0 && cns[0] != null) {
+                names.add(cns[0]);
+                if (ie6) {
+                    for (int i = 1; i < cns.length; i++) {
+                        names.add(cns[i]);
+                    }
+                }
+            }
+            if (subjectAlts != null) {
+                for (int i = 0; i < subjectAlts.length; i++) {
+                    if (subjectAlts[i] != null) {
+                        names.add(subjectAlts[i]);
+                    }
+                }
+            }
+            if (names.isEmpty()) {
+                String msg = "Certificate for " + hosts[0] + " doesn't contain CN or DNS subjectAlt";
+                throw new SSLException(msg);
+            }
+
+            // StringBuffer for building the error message.
+            buf = new StringBuffer();
+
+            boolean match = false;
+            out:
+            for (Iterator<String> it = names.iterator(); it.hasNext();) {
+                // Don't trim the CN, though!
+                final String cn = StringUtils.toLowerCase(it.next());
+                // Store CN in StringBuffer in case we need to report an error.
+                buf.append(" <");
+                buf.append(cn);
+                buf.append('>');
+                if (it.hasNext()) {
+                    buf.append(" OR");
+                }
+
+                // The CN better have at least two dots if it wants wildcard
+                // action.  It also can't be [*.co.uk] or [*.co.jp] or
+                // [*.org.uk], etc...
+                boolean doWildcard = cn.startsWith("*.")
+                    && cn.lastIndexOf('.') >= 0
+                    && !isIP4Address(cn)
+                    && acceptableCountryWildcard(cn);
+
+                for (int i = 0; i < hosts.length; i++) {
+                    final String hostName =
+                        StringUtils.toLowerCase(hosts[i].trim());
+                    if (doWildcard) {
+                        match = hostName.endsWith(cn.substring(1));
+                        if (match && strictWithSubDomains) {
+                            // If we're in strict mode, then [*.foo.com] is not
+                            // allowed to match [a.b.foo.com]
+                            match = countDots(hostName) == countDots(cn);
+                        }
+                    } else {
+                        match = hostName.equals(cn);
+                    }
+                    if (match) {
+                        break out;
+                    }
+                }
+            }
+            if (!match) {
+                throw new SSLException("hostname in certificate didn't match: " + hostnames + " !=" + buf);
+            }
+        }
+
+        public static boolean isIP4Address(final String cn) {
+            boolean isIP4 = true;
+            String tld = cn;
+            int x = cn.lastIndexOf('.');
+            // We only bother analyzing the characters after the final dot
+            // in the name.
+            if (x >= 0 && x + 1 < cn.length()) {
+                tld = cn.substring(x + 1);
+            }
+            for (int i = 0; i < tld.length(); i++) {
+                if (!Character.isDigit(tld.charAt(i))) {
+                    isIP4 = false;
+                    break;
+                }
+            }
+            return isIP4;
+        }
+
+        public static boolean acceptableCountryWildcard(final String cn) {
+            int cnLen = cn.length();
+            if (cnLen >= 7 && cnLen <= 9) {
+                // Look for the '.' in the 3rd-last position:
+                if (cn.charAt(cnLen - 3) == '.') {
+                    // Trim off the [*.] and the [.XX].
+                    String s = cn.substring(2, cnLen - 3);
+                    // And test against the sorted array of bad 2lds:
+                    int x = Arrays.binarySearch(BAD_COUNTRY_2LDS, s);
+                    return x < 0;
+                }
+            }
+            return true;
+        }
+
+        public static boolean isLocalhost(String host) {
+            host = host != null ? StringUtils.toLowerCase(host.trim()) : "";
+            if (host.startsWith("::1")) {
+                int x = host.lastIndexOf('%');
+                if (x >= 0) {
+                    host = host.substring(0, x);
+                }
+            }
+            int x = Arrays.binarySearch(LOCALHOSTS, host);
+            return x >= 0;
+        }
+
+        /**
+         * Counts the number of dots "." in a string.
+         *
+         * @param s string to count dots from
+         * @return number of dots
+         */
+        public static int countDots(final String s) {
+            int count = 0;
+            for (int i = 0; i < s.length(); i++) {
+                if (s.charAt(i) == '.') {
+                    count++;
+                }
+            }
+            return count;
+        }
+    }
+
+    class Certificates {
+      public static String[] getCNs(X509Certificate cert) {
+        final List<String> cnList = new LinkedList<String>();
+        /*
+          Sebastian Hauer's original StrictSSLProtocolSocketFactory used
+          getName() and had the following comment:
+
+             Parses a X.500 distinguished name for the value of the
+             "Common Name" field.  This is done a bit sloppy right
+             now and should probably be done a bit more according to
+             <code>RFC 2253</code>.
+
+           I've noticed that toString() seems to do a better job than
+           getName() on these X500Principal objects, so I'm hoping that
+           addresses Sebastian's concern.
+
+           For example, getName() gives me this:
+           1.2.840.113549.1.9.1=#16166a756c6975736461766965734063756362632e636f6d
+
+           whereas toString() gives me this:
+           EMAILADDRESS=juliusdavies@cucbc.com
+
+           Looks like toString() even works with non-ascii domain names!
+           I tested it with "&#x82b1;&#x5b50;.co.jp" and it worked fine.
+          */
+        String subjectPrincipal = cert.getSubjectX500Principal().toString();
+        StringTokenizer st = new StringTokenizer(subjectPrincipal, ",");
+        while (st.hasMoreTokens()) {
+            String tok = st.nextToken();
+            int x = tok.indexOf("CN=");
+            if (x >= 0) {
+                cnList.add(tok.substring(x + 3));
+            }
+        }
+        if (!cnList.isEmpty()) {
+            String[] cns = new String[cnList.size()];
+            cnList.toArray(cns);
+            return cns;
+        } else {
+            return null;
+        }
+      }
+
+
+      /**
+       * Extracts the array of SubjectAlt DNS names from an X509Certificate.
+       * Returns null if there aren't any.
+       * <p/>
+       * Note:  Java doesn't appear able to extract international characters
+       * from the SubjectAlts.  It can only extract international characters
+       * from the CN field.
+       * <p/>
+       * (Or maybe the version of OpenSSL I'm using to test isn't storing the
+       * international characters correctly in the SubjectAlts?).
+       *
+       * @param cert X509Certificate
+       * @return Array of SubjectALT DNS names stored in the certificate.
+       */
+      public static String[] getDNSSubjectAlts(X509Certificate cert) {
+          final List<String> subjectAltList = new LinkedList<String>();
+          Collection<List<?>> c = null;
+          try {
+              c = cert.getSubjectAlternativeNames();
+          } catch (CertificateParsingException cpe) {
+              // Should probably log.debug() this?
+              cpe.printStackTrace();
+          }
+          if (c != null) {
+              Iterator<List<?>> it = c.iterator();
+              while (it.hasNext()) {
+                  List<?> list = it.next();
+                  int type = ((Integer) list.get(0)).intValue();
+                  // If type is 2, then we've got a dNSName
+                  if (type == 2) {
+                      String s = (String) list.get(1);
+                      subjectAltList.add(s);
+                  }
+              }
+          }
+          if (!subjectAltList.isEmpty()) {
+              String[] subjectAlts = new String[subjectAltList.size()];
+              subjectAltList.toArray(subjectAlts);
+              return subjectAlts;
+          } else {
+              return null;
+          }
+      }
+    }
+
+}
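
For reference, a minimal sketch of the strict wildcard rule implemented above:
"*.foo.com" matches "a.foo.com" but, with strictWithSubDomains, not
"a.b.foo.com". The demo class and names below are hypothetical, not part of
this patch.

    import java.util.Locale;

    public class WildcardMatchDemo {
        // Same dot-counting helper as countDots() above.
        static int countDots(String s) {
            int count = 0;
            for (int i = 0; i < s.length(); i++) {
                if (s.charAt(i) == '.') {
                    count++;
                }
            }
            return count;
        }

        // Strict matching: the wildcard may only cover a single label.
        static boolean strictWildcardMatch(String host, String cn) {
            host = host.toLowerCase(Locale.ENGLISH);
            cn = cn.toLowerCase(Locale.ENGLISH);
            if (!cn.startsWith("*.")) {
                return host.equals(cn);
            }
            return host.endsWith(cn.substring(1))      // ".foo.com" suffix
                && countDots(host) == countDots(cn);   // same label depth
        }

        public static void main(String[] args) {
            System.out.println(strictWildcardMatch("a.foo.com", "*.foo.com"));   // true
            System.out.println(strictWildcardMatch("a.b.foo.com", "*.foo.com")); // false
        }
    }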

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/util/ConnectionConfigurator.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/ConnectionConfigurator.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/ConnectionConfigurator.java
new file mode 100644
index 0000000..3d5dd39
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/ConnectionConfigurator.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.hadoop.has.common.util;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ * Interface to configure {@link HttpURLConnection} instances created by
+ * {@link org.apache.hadoop.has.common.spnego.AuthenticatedURL} instances.
+ */
+public interface ConnectionConfigurator {
+
+  /**
+   * Configures the given {@link HttpURLConnection} instance.
+   *
+   * @param conn the {@link HttpURLConnection} instance to configure.
+   * @return the configured {@link HttpURLConnection} instance.
+   * 
+   * @throws IOException if an IO error occurred.
+   */
+  HttpURLConnection configure(HttpURLConnection conn) throws IOException;
+
+}
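
A minimal sketch of an implementor, mirroring the timeout pattern used
elsewhere in this patch; the class name and the 30-second value are
illustrative assumptions, not part of the patch.

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import org.apache.hadoop.has.common.util.ConnectionConfigurator;

    public class TimeoutConnConfigurator implements ConnectionConfigurator {
        private static final int TIMEOUT_MS = 30 * 1000; // arbitrary example value

        @Override
        public HttpURLConnection configure(HttpURLConnection conn) throws IOException {
            // Apply the same value to both the connect and read timeouts.
            conn.setConnectTimeout(TIMEOUT_MS);
            conn.setReadTimeout(TIMEOUT_MS);
            return conn;
        }
    }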

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasJaasLoginUtil.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasJaasLoginUtil.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasJaasLoginUtil.java
new file mode 100644
index 0000000..e824ea4
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasJaasLoginUtil.java
@@ -0,0 +1,261 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.common.util;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosPrincipal;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
+import java.io.File;
+import java.io.IOException;
+import java.security.Principal;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * JAAS utilities for HAS login.
+ */
+public class HasJaasLoginUtil {
+    public static final Logger LOG = LoggerFactory.getLogger(HasJaasLoginUtil.class);
+
+    public static final boolean ENABLE_DEBUG = true;
+
+    private static String getKrb5LoginModuleName() {
+        return System.getProperty("java.vendor").contains("IBM")
+            ? "com.ibm.security.auth.module.Krb5LoginModule"
+            : "org.apache.hadoop.has.client.HasLoginModule";
+    }
+
+    /**
+     * Log a user in from a tgt ticket.
+     *
+     * @throws IOException
+     */
+    public static synchronized Subject loginUserFromTgtTicket(String hadoopSecurityHas) throws IOException {
+
+        TICKET_KERBEROS_OPTIONS.put("hadoopSecurityHas", hadoopSecurityHas);
+        Subject subject = new Subject();
+        Configuration conf = new HasJaasConf();
+        String confName = "ticket-kerberos";
+        LoginContext loginContext = null;
+        try {
+            loginContext = new LoginContext(confName, subject, null, conf);
+        } catch (LoginException e) {
+            throw new IOException("Fail to create LoginContext for " + e);
+        }
+        try {
+            loginContext.login();
+            LOG.info("Login successful for user "
+                + subject.getPrincipals().iterator().next().getName());
+        } catch (LoginException e) {
+            throw new IOException("Login failure for " + e);
+        }
+        return loginContext.getSubject();
+    }
+
+    /**
+     * Has Jaas config.
+     */
+    static class HasJaasConf extends Configuration {
+        @Override
+        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+
+            return new AppConfigurationEntry[]{
+                TICKET_KERBEROS_LOGIN};
+        }
+    }
+
+    private static final Map<String, String> BASIC_JAAS_OPTIONS =
+        new HashMap<String, String>();
+
+    static {
+        String jaasEnvVar = System.getenv("HADOOP_JAAS_DEBUG");
+        if (jaasEnvVar != null && "true".equalsIgnoreCase(jaasEnvVar)) {
+            BASIC_JAAS_OPTIONS.put("debug", String.valueOf(ENABLE_DEBUG));
+        }
+    }
+
+    private static final Map<String, String> TICKET_KERBEROS_OPTIONS =
+        new HashMap<String, String>();
+
+    static {
+        TICKET_KERBEROS_OPTIONS.put("doNotPrompt", "true");
+        TICKET_KERBEROS_OPTIONS.put("useTgtTicket", "true");
+        TICKET_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
+    }
+
+    private static final AppConfigurationEntry TICKET_KERBEROS_LOGIN =
+        new AppConfigurationEntry(getKrb5LoginModuleName(),
+            AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL,
+            TICKET_KERBEROS_OPTIONS);
+
+
+    public static Subject loginUsingTicketCache(
+        String principal, File cacheFile) throws IOException {
+        Set<Principal> principals = new HashSet<Principal>();
+        principals.add(new KerberosPrincipal(principal));
+
+        Subject subject = new Subject(false, principals,
+            new HashSet<Object>(), new HashSet<Object>());
+
+        Configuration conf = useTicketCache(principal, cacheFile);
+        String confName = "TicketCacheConf";
+        LoginContext loginContext = null;
+        try {
+            loginContext = new LoginContext(confName, subject, null, conf);
+        } catch (LoginException e) {
+            throw new IOException("Failed to create LoginContext: " + e);
+        }
+        try {
+            loginContext.login();
+            LOG.info("Login successful for user "
+                + subject.getPrincipals().iterator().next().getName());
+        } catch (LoginException e) {
+            throw new IOException("Login failure: " + e);
+        }
+        return loginContext.getSubject();
+    }
+
+    public static Subject loginUsingKeytab(
+        String principal, File keytabFile) throws IOException {
+        Set<Principal> principals = new HashSet<Principal>();
+        principals.add(new KerberosPrincipal(principal));
+
+        Subject subject = new Subject(false, principals,
+            new HashSet<Object>(), new HashSet<Object>());
+
+        Configuration conf = useKeytab(principal, keytabFile);
+        String confName = "KeytabConf";
+        LoginContext loginContext = null;
+        try {
+            loginContext = new LoginContext(confName, subject, null, conf);
+        } catch (LoginException e) {
+            throw new IOException("Fail to create LoginContext for " + e);
+        }
+        try {
+            loginContext.login();
+            LOG.info("Login successful for user "
+                + subject.getPrincipals().iterator().next().getName());
+        } catch (LoginException e) {
+            throw new IOException("Login failure: " + e);
+        }
+        return loginContext.getSubject();
+    }
+
+    public static LoginContext loginUsingKeytabReturnContext(
+        String principal, File keytabFile) throws IOException {
+        Set<Principal> principals = new HashSet<Principal>();
+        principals.add(new KerberosPrincipal(principal));
+
+        Subject subject = new Subject(false, principals,
+            new HashSet<Object>(), new HashSet<Object>());
+
+        Configuration conf = useKeytab(principal, keytabFile);
+        String confName = "KeytabConf";
+        LoginContext loginContext = null;
+        try {
+            loginContext = new LoginContext(confName, subject, null, conf);
+        } catch (LoginException e) {
+            throw new IOException("Fail to create LoginContext for " + e);
+        }
+        try {
+            loginContext.login();
+            LOG.info("Login successful for user "
+                + subject.getPrincipals().iterator().next().getName());
+        } catch (LoginException e) {
+            throw new IOException("Login failure for " + e);
+        }
+        return loginContext;
+    }
+
+    public static Configuration useTicketCache(String principal,
+                                               File credentialFile) {
+        return new TicketCacheJaasConf(principal, credentialFile);
+    }
+
+    public static Configuration useKeytab(String principal, File keytabFile) {
+        return new KeytabJaasConf(principal, keytabFile);
+    }
+
+    static class TicketCacheJaasConf extends Configuration {
+        private String principal;
+        private File clientCredentialFile;
+
+        TicketCacheJaasConf(String principal, File clientCredentialFile) {
+            this.principal = principal;
+            this.clientCredentialFile = clientCredentialFile;
+        }
+
+        @Override
+        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+            Map<String, String> options = new HashMap<String, String>();
+            options.put("principal", principal);
+            options.put("storeKey", "false");
+            options.put("doNotPrompt", "false");
+            options.put("useTicketCache", "true");
+            options.put("renewTGT", "true");
+            options.put("refreshKrb5Config", "true");
+            options.put("isInitiator", "true");
+            options.put("ticketCache", clientCredentialFile.getAbsolutePath());
+            options.putAll(BASIC_JAAS_OPTIONS);
+
+            return new AppConfigurationEntry[]{
+                new AppConfigurationEntry(getKrb5LoginModuleName(),
+                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                    options)};
+        }
+    }
+
+    static class KeytabJaasConf extends Configuration {
+        private String principal;
+        private File keytabFile;
+
+        KeytabJaasConf(String principal, File keytab) {
+            this.principal = principal;
+            this.keytabFile = keytab;
+        }
+
+        @Override
+        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+            Map<String, String> options = new HashMap<String, String>();
+            options.put("keyTab", keytabFile.getAbsolutePath());
+            options.put("principal", principal);
+            options.put("useKeyTab", "true");
+            options.put("storeKey", "true");
+            options.put("doNotPrompt", "true");
+            options.put("renewTGT", "false");
+            options.put("refreshKrb5Config", "true");
+            options.put("isInitiator", "true");
+            options.putAll(BASIC_JAAS_OPTIONS);
+
+            return new AppConfigurationEntry[]{
+                new AppConfigurationEntry(getKrb5LoginModuleName(),
+                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                    options)};
+        }
+    }
+}
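
A minimal usage sketch of the keytab login path; the principal and keytab
path below are placeholders, not values from this patch.

    import java.io.File;
    import javax.security.auth.Subject;
    import org.apache.hadoop.has.common.util.HasJaasLoginUtil;

    public class KeytabLoginDemo {
        public static void main(String[] args) throws Exception {
            // Placeholder principal and keytab location.
            Subject subject = HasJaasLoginUtil.loginUsingKeytab(
                "hdfs/localhost@EXAMPLE.COM", new File("/etc/has/hdfs.keytab"));
            System.out.println("Logged in as "
                + subject.getPrincipals().iterator().next().getName());
        }
    }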

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasUtil.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasUtil.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasUtil.java
new file mode 100644
index 0000000..1d9f4b7
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/HasUtil.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.common.util;
+
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.crypto.EncryptionHandler;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.PrintStream;
+
+public class HasUtil {
+
+    public static EncryptionKey getClientKey(String userName, String passPhrase,
+                                             EncryptionType type) throws KrbException {
+        EncryptionKey clientKey = EncryptionHandler.string2Key(userName,
+            passPhrase, type);
+        return clientKey;
+    }
+
+    /**
+     * Get the HAS configuration from the given file.
+     * @param hasConfFile the HAS configuration file
+     * @return the HAS configuration, or null if the file does not exist
+     * @throws HasException if the configuration file cannot be loaded
+     */
+    public static HasConfig getHasConfig(File hasConfFile) throws HasException {
+
+        if (hasConfFile.exists()) {
+            HasConfig hasConfig = new HasConfig();
+            try {
+                hasConfig.addIniConfig(hasConfFile);
+            } catch (IOException e) {
+                throw new HasException("Can not load the has configuration file "
+                    + hasConfFile.getAbsolutePath());
+            }
+            return hasConfig;
+        }
+
+        return null;
+    }
+
+    public static void setEnableConf(File hasConfFile, String value)
+            throws HasException, IOException {
+        HasConfig hasConfig = getHasConfig(hasConfFile);
+        if (hasConfig == null) {
+            throw new HasException("The has configuration file does not exist: "
+                    + hasConfFile.getAbsolutePath());
+        }
+        String oldValue = hasConfig.getEnableConf();
+        if (oldValue == null) {
+            throw new HasException("Please set enable_conf in has-server.conf.");
+        }
+        if (oldValue.equals(value)) {
+            return;
+        }
+        try {
+            BufferedReader bf = new BufferedReader(new FileReader(hasConfFile));
+            StringBuilder sb = new StringBuilder();
+            String tempString;
+            while ((tempString = bf.readLine()) != null) {
+                if (tempString.trim().startsWith("enable_conf")) {
+                    tempString = tempString.replace(oldValue, value);
+                }
+                sb.append(tempString).append("\n");
+            }
+            bf.close();
+            PrintStream ps = new PrintStream(new FileOutputStream(hasConfFile));
+            ps.print(sb.toString());
+            ps.close();
+        } catch (FileNotFoundException e) {
+            throw new HasException("Can not load the has configuration file "
+                    + hasConfFile.getAbsolutePath());
+        }
+    }
+}
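
A minimal usage sketch; the configuration path is a placeholder.

    import java.io.File;
    import org.apache.hadoop.has.common.HasConfig;
    import org.apache.hadoop.has.common.util.HasUtil;

    public class HasConfigDemo {
        public static void main(String[] args) throws Exception {
            File confFile = new File("/etc/has/has-server.conf"); // placeholder path
            HasConfig conf = HasUtil.getHasConfig(confFile);
            if (conf == null) {
                System.err.println("No configuration found at " + confFile);
                return;
            }
            System.out.println("enable_conf = " + conf.getEnableConf());
        }
    }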

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/util/PlatformName.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/PlatformName.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/PlatformName.java
new file mode 100644
index 0000000..6f64c62
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/PlatformName.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.has.common.util;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ * A helper class for getting build information about the Java VM.
+ */
+@InterfaceAudience.LimitedPrivate({"HBase"})
+@InterfaceStability.Unstable
+public class PlatformName {
+  /**
+   * The complete platform 'name' to identify the platform as
+   * per the java-vm.
+   */
+  public static final String PLATFORM_NAME =
+      (System.getProperty("os.name").startsWith("Windows")
+      ? System.getenv("os") : System.getProperty("os.name"))
+      + "-" + System.getProperty("os.arch")
+      + "-" + System.getProperty("sun.arch.data.model");
+
+  /**
+   * The java vendor name used in this platform.
+   */
+  public static final String JAVA_VENDOR_NAME = System.getProperty("java.vendor");
+
+  /**
+   * Indicates whether the current Java vendor is IBM.
+   */
+  public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM");
+
+  public static void main(String[] args) {
+    System.out.println(PLATFORM_NAME);
+  }
+}
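
The IBM_JAVA flag exists so callers can pick vendor-specific class names, as
HasJaasLoginUtil does above for the Krb5 login module. A minimal sketch of
that pattern; the module names below are the stock JDK ones.

    import org.apache.hadoop.has.common.util.PlatformName;

    public class VendorCheckDemo {
        public static void main(String[] args) {
            // IBM JVMs ship their own Kerberos login module; other JDKs use Sun's.
            String krb5Module = PlatformName.IBM_JAVA
                ? "com.ibm.security.auth.module.Krb5LoginModule"
                : "com.sun.security.auth.module.Krb5LoginModule";
            System.out.println(PlatformName.PLATFORM_NAME + " -> " + krb5Module);
        }
    }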

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/util/StringUtils.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/StringUtils.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/StringUtils.java
new file mode 100644
index 0000000..2b00904
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/StringUtils.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.has.common.util;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import java.util.Locale;
+
+/**
+ * General string utils
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class StringUtils {
+
+  /**
+   * Converts all of the characters in this String to lower case with
+   * Locale.ENGLISH.
+   *
+   * @param str  string to be converted
+   * @return     the str, converted to lowercase.
+   */
+  public static String toLowerCase(String str) {
+    return str.toLowerCase(Locale.ENGLISH);
+  }
+
+  /**
+   * Converts all of the characters in this String to upper case with
+   * Locale.ENGLISH.
+   *
+   * @param str  string to be converted
+   * @return     the str, converted to uppercase.
+   */
+  public static String toUpperCase(String str) {
+    return str.toUpperCase(Locale.ENGLISH);
+  }
+
+}
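
Locale.ENGLISH matters here because the default-locale overloads are
locale-sensitive: under a Turkish default locale, lowercasing "I" yields the
dotless "ı", which breaks ASCII-only comparisons. A short demonstration:

    import java.util.Locale;

    public class LocaleCaseDemo {
        public static void main(String[] args) {
            String s = "TITLE";
            System.out.println(s.toLowerCase(new Locale("tr", "TR"))); // "tıtle"
            System.out.println(s.toLowerCase(Locale.ENGLISH));         // "title"
        }
    }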

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/util/URLConnectionFactory.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/util/URLConnectionFactory.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/URLConnectionFactory.java
new file mode 100644
index 0000000..a818864
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/util/URLConnectionFactory.java
@@ -0,0 +1,215 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.has.common.util;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.spnego.AuthenticatedURL;
+import org.apache.hadoop.has.common.spnego.AuthenticationException;
+import org.apache.hadoop.has.common.spnego.KerberosHasAuthenticator;
+import org.apache.hadoop.has.common.ssl.SSLFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.SSLSocketFactory;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.net.URLConnection;
+import java.security.GeneralSecurityException;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ * Utilities for handling URLs
+ */
+@InterfaceStability.Unstable
+public class URLConnectionFactory {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(URLConnectionFactory.class);
+
+  /**
+   * Timeout for socket connects and reads: 1 minute.
+   */
+  public static final int DEFAULT_SOCKET_TIMEOUT = 60 * 1000;
+  private final ConnectionConfigurator connConfigurator;
+
+  private static final ConnectionConfigurator DEFAULT_TIMEOUT_CONN_CONFIGURATOR
+      = new ConnectionConfigurator() {
+        @Override
+        public HttpURLConnection configure(HttpURLConnection conn)
+            throws IOException {
+          URLConnectionFactory.setTimeouts(conn,
+                                           DEFAULT_SOCKET_TIMEOUT,
+                                           DEFAULT_SOCKET_TIMEOUT);
+          return conn;
+        }
+      };
+
+  /**
+   * A URLConnectionFactory that sets the default timeouts and only trusts
+   * Java's default SSL certificates.
+   */
+  public static final URLConnectionFactory DEFAULT_SYSTEM_CONNECTION_FACTORY =
+      new URLConnectionFactory(DEFAULT_TIMEOUT_CONN_CONFIGURATOR);
+
+  /**
+   * Construct a new URLConnectionFactory based on the configuration. It will
+   * try to load the SSL configuration when one is specified.
+   */
+  public static URLConnectionFactory newDefaultURLConnectionFactory(HasConfig conf) {
+    ConnectionConfigurator conn = null;
+    try {
+      conn = newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf);
+    } catch (Exception e) {
+      LOG.debug(
+          "Cannot load customized ssl related configuration. Fallback to system-generic settings.",
+          e);
+      conn = DEFAULT_TIMEOUT_CONN_CONFIGURATOR;
+    }
+    return new URLConnectionFactory(conn);
+  }
+
+  private static ConnectionConfigurator getSSLConnectionConfiguration(
+      HasConfig conf) {
+    ConnectionConfigurator conn;
+    try {
+      conn = newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf);
+    } catch (Exception e) {
+      LOG.warn(
+          "Cannot load customized ssl related configuration. Fallback to"
+              + " system-generic settings.",
+          e);
+      conn = DEFAULT_TIMEOUT_CONN_CONFIGURATOR;
+    }
+
+    return conn;
+  }
+
+  @VisibleForTesting
+  URLConnectionFactory(ConnectionConfigurator connConfigurator) {
+    this.connConfigurator = connConfigurator;
+  }
+
+  /**
+   * Create a new ConnectionConfigurator for SSL connections
+   */
+  private static ConnectionConfigurator newSslConnConfigurator(
+      final int defaultTimeout, HasConfig conf)
+      throws IOException, GeneralSecurityException, HasException {
+    final SSLFactory factory;
+    final SSLSocketFactory sf;
+    final HostnameVerifier hv;
+    final int connectTimeout;
+    final int readTimeout;
+
+    factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
+    factory.init();
+    sf = factory.createSSLSocketFactory();
+    hv = factory.getHostnameVerifier();
+
+    connectTimeout = defaultTimeout;
+
+    readTimeout = defaultTimeout;
+
+    return new ConnectionConfigurator() {
+      @Override
+      public HttpURLConnection configure(HttpURLConnection conn)
+          throws IOException {
+        if (conn instanceof HttpsURLConnection) {
+          HttpsURLConnection c = (HttpsURLConnection) conn;
+          c.setSSLSocketFactory(sf);
+          c.setHostnameVerifier(hv);
+        }
+        URLConnectionFactory.setTimeouts(conn, connectTimeout, readTimeout);
+        return conn;
+      }
+    };
+  }
+
+  /**
+   * Opens a url with read and connect timeouts
+   *
+   * @param url
+   *          to open
+   * @return URLConnection
+   * @throws IOException
+   */
+  public URLConnection openConnection(URL url) throws IOException {
+    try {
+      return openConnection(url, false, null);
+    } catch (AuthenticationException e) {
+      // Unreachable
+      LOG.error("Open connection {} failed", url, e);
+      return null;
+    }
+  }
+
+  /**
+   * Opens a URL with read and connect timeouts.
+   *
+   * @param url
+   *          URL to open
+   * @param isSpnego
+   *          whether the url should be authenticated via SPNEGO
+   * @param hasConfig
+   *          the HAS configuration used for SPNEGO authentication
+   * @return URLConnection
+   * @throws IOException
+   * @throws AuthenticationException
+   */
+  public URLConnection openConnection(URL url, boolean isSpnego, HasConfig hasConfig)
+      throws IOException, AuthenticationException {
+    if (isSpnego && (hasConfig != null)) {
+      LOG.debug("open AuthenticatedURL connection {}", url);
+//      UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab();
+      final AuthenticatedURL.Token authToken = new AuthenticatedURL.Token();
+      return new AuthenticatedURL(new KerberosHasAuthenticator(hasConfig.getAdminKeytab(),
+          hasConfig.getAdminKeytabPrincipal()),
+          connConfigurator).openConnection(url, authToken);
+    } else {
+      LOG.debug("open URL connection");
+      URLConnection connection = url.openConnection();
+      if (connection instanceof HttpURLConnection) {
+        connConfigurator.configure((HttpURLConnection) connection);
+      }
+      return connection;
+    }
+  }
+
+  /**
+   * Sets timeout parameters on the given URLConnection.
+   *
+   * @param connection
+   *          URLConnection to set
+   * @param connectTimeout
+   *          the connect timeout of the connection.
+   * @param readTimeout
+   *          the read timeout of the connection.
+   */
+  private static void setTimeouts(URLConnection connection,
+                                  int connectTimeout,
+                                  int readTimeout) {
+    connection.setConnectTimeout(connectTimeout);
+    connection.setReadTimeout(readTimeout);
+  }
+}
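
A minimal usage sketch of the non-SPNEGO path; the URL is a placeholder.

    import java.net.HttpURLConnection;
    import java.net.URL;
    import org.apache.hadoop.has.common.util.URLConnectionFactory;

    public class OpenConnectionDemo {
        public static void main(String[] args) throws Exception {
            URLConnectionFactory factory =
                URLConnectionFactory.DEFAULT_SYSTEM_CONNECTION_FACTORY;
            // Placeholder URL; connect and read timeouts default to 60 seconds.
            HttpURLConnection conn = (HttpURLConnection)
                factory.openConnection(new URL("http://localhost:8090/"));
            System.out.println("HTTP " + conn.getResponseCode());
        }
    }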

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/assembly.xml
----------------------------------------------------------------------
diff --git a/has/has-dist/assembly.xml b/has/has-dist/assembly.xml
new file mode 100644
index 0000000..ce79af7
--- /dev/null
+++ b/has/has-dist/assembly.xml
@@ -0,0 +1,56 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+  <id>bin</id>
+  <formats>
+    <format>tar.gz</format>
+  </formats>
+  <fileSets>
+    <fileSet>
+      <directory>bin</directory>
+      <outputDirectory>bin</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>conf</directory>
+      <outputDirectory>conf</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>webapps</directory>
+      <outputDirectory>webapps</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>target/lib</directory>
+      <outputDirectory>target/lib</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <outputDirectory>/</outputDirectory>
+      <includes>
+        <include>log4j.properties</include>
+      </includes>
+    </fileSet>
+    <fileSet>
+      <directory>${basedir}/..</directory>
+      <outputDirectory>/</outputDirectory>
+      <includes>
+        <include>LICENSE</include>
+        <include>README.md</include>
+      </includes>
+    </fileSet>
+  </fileSets>
+</assembly>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/hadmin-local.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/hadmin-local.sh b/has/has-dist/bin/hadmin-local.sh
new file mode 100644
index 0000000..5a7eb3e
--- /dev/null
+++ b/has/has-dist/bin/hadmin-local.sh
@@ -0,0 +1,54 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+APP_MAIN=org.apache.hadoop.has.tool.server.hadmin.local.HadminLocalTool
+
+# Use HAS_CONF_DIR as CONF_DIR when "-k" is given
+if [ X"$1" = X"-k" ]; then
+  if [ "$HAS_CONF_DIR" != "" ] && [ -d "$HAS_CONF_DIR" ]; then
+    CONF_DIR=${HAS_CONF_DIR}
+  else
+    echo "[ERROR] HAS_CONF_DIR is null or not a directory"
+    exit 1
+  fi
+else
+  CONF_DIR=$1
+  if [ "$CONF_DIR" != "" ]; then
+    if [ ! -d "$CONF_DIR" ]; then
+      echo "[ERROR] ${CONF_DIR} is not a directory"
+    fi
+  fi
+fi
+
+# Get HAS_HOME directory
+bin=`dirname "$0"`
+HAS_HOME=`cd ${bin}/..; pwd`
+cd ${HAS_HOME}
+
+for var in $*; do
+  if [ X"$var" = X"-D" ]; then
+    DEBUG="-Xdebug -Xrunjdwp:transport=dt_socket,address=8002,server=y,suspend=n"
+  elif [ ! -d "$var" ]; then
+    args="$args $var"
+  fi
+done
+
+echo "[INFO] conf_dir=$CONF_DIR"
+HAS_OPTS="-DHAS_LOGFILE=hadmin"
+
+java ${DEBUG} -classpath target/lib/*:. ${HAS_OPTS} ${APP_MAIN} ${CONF_DIR} ${args}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/hadmin-remote.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/hadmin-remote.sh b/has/has-dist/bin/hadmin-remote.sh
new file mode 100644
index 0000000..233c056
--- /dev/null
+++ b/has/has-dist/bin/hadmin-remote.sh
@@ -0,0 +1,56 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+CONF_DIR=$1
+APP_MAIN=org.apache.hadoop.has.tool.client.hadmin.remote.HadminRemoteTool
+
+# Fall back to HAS_CONF_DIR if no CONF_DIR argument is given
+if [ "$CONF_DIR" != "" ]; then
+  if [ ! -d "$CONF_DIR" ]; then
+    echo "[ERROR] ${CONF_DIR} is not a directory"
+    usage
+  fi
+else
+  if [ "$HAS_CONF_DIR" != "" ] && [ -d "$HAS_CONF_DIR" ]; then
+    CONF_DIR=${HAS_CONF_DIR}
+  else
+    echo "[ERROR] HAS_CONF_DIR is null or not a directory"
+    exit 1
+  fi
+fi
+
+# Load HAS environment variables
+if [ -f "${CONF_DIR}/has-env.sh" ]; then
+  . "${CONF_DIR}/has-env.sh"
+fi
+
+# Get HAS_HOME directory
+bin=`dirname "$0"`
+HAS_HOME=`cd ${bin}/..; pwd`
+cd ${HAS_HOME}
+
+for var in $*; do
+  if [ X"$var" = X"-D" ]; then
+    DEBUG="-Xdebug -Xrunjdwp:transport=dt_socket,address=8003,server=y,suspend=y"
+  fi
+done
+
+echo "[INFO] conf_dir=$CONF_DIR"
+HAS_OPTS="-DHAS_LOGFILE=hadmin"
+
+java ${DEBUG} -classpath target/lib/*:. ${HAS_OPTS} ${APP_MAIN} ${CONF_DIR}


[03/10] directory-kerby git commit: Add the HAS project to Kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/TestUtil.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/TestUtil.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/TestUtil.java
new file mode 100644
index 0000000..1ae1a64
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/TestUtil.java
@@ -0,0 +1,368 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server;
+
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.server.web.WebConfigKey;
+import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.bouncycastle.x509.X509V1CertificateGenerator;
+
+import javax.security.auth.x500.X500Principal;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.math.BigInteger;
+import java.net.URL;
+import java.security.GeneralSecurityException;
+import java.security.InvalidKeyException;
+import java.security.Key;
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.KeyStore;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
+import java.security.SecureRandom;
+import java.security.SignatureException;
+import java.security.cert.Certificate;
+import java.security.cert.CertificateEncodingException;
+import java.security.cert.X509Certificate;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+class TestUtil {
+
+  /**
+   * system property for test data: {@value}
+   */
+  private static final String SYSPROP_TEST_DATA_DIR = "test.build.data";
+
+  /**
+   * The default path for use in Hadoop path references: {@value}
+   */
+  private static final String DEFAULT_TEST_DATA_PATH = "target/";
+
+  /**
+   * Get a temp path. This may or may not be relative; it depends on what the
+   * {@link #SYSPROP_TEST_DATA_DIR} is set to. If unset, it returns a path
+   * under the relative path {@link #DEFAULT_TEST_DATA_PATH}
+   *
+   * @param subPath sub path, with no leading "/" character
+   * @return a string to use in paths
+   */
+  public static String getTempPath(String subPath) {
+    String prop = System.getProperty(SYSPROP_TEST_DATA_DIR, DEFAULT_TEST_DATA_PATH);
+    if (prop.isEmpty()) {
+      // corner case: property is there but empty
+      prop = DEFAULT_TEST_DATA_PATH;
+    }
+    if (!prop.endsWith("/")) {
+      prop = prop + "/";
+    }
+    return prop + subPath;
+  }
+
+  public static String getClasspathDir(Class<?> testClass) throws Exception {
+    String file = testClass.getName();
+    file = file.replace('.', '/') + ".class";
+    URL url = Thread.currentThread().getContextClassLoader().getResource(file);
+    String baseDir = url.toURI().getPath();
+    baseDir = baseDir.substring(0, baseDir.length() - file.length() - 1);
+    return baseDir;
+  }
+
+  @SuppressWarnings("deprecation")
+  /*
+   * Create a self-signed X.509 Certificate.
+   *
+   * @param dn the X.509 Distinguished Name, eg "CN=Test, L=London, C=GB"
+   * @param pair the KeyPair
+   * @param days how many days from now the Certificate is valid for
+   * @param algorithm the signing algorithm, eg "SHA1withRSA"
+   * @return the self-signed certificate
+   */
+  private static X509Certificate generateCertificate(String dn, KeyPair pair, int days, String algorithm)
+      throws CertificateEncodingException, InvalidKeyException, IllegalStateException,
+      NoSuchProviderException, NoSuchAlgorithmException, SignatureException {
+
+    Date from = new Date();
+    Date to = new Date(from.getTime() + days * 86400000L);
+    BigInteger sn = new BigInteger(64, new SecureRandom());
+    X509V1CertificateGenerator certGen = new X509V1CertificateGenerator();
+    X500Principal dnName = new X500Principal(dn);
+
+    certGen.setSerialNumber(sn);
+    certGen.setIssuerDN(dnName);
+    certGen.setNotBefore(from);
+    certGen.setNotAfter(to);
+    certGen.setSubjectDN(dnName);
+    certGen.setPublicKey(pair.getPublic());
+    certGen.setSignatureAlgorithm(algorithm);
+
+    return certGen.generate(pair.getPrivate());
+  }
+
+  private static KeyPair generateKeyPair(String algorithm) throws NoSuchAlgorithmException {
+    KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm);
+    keyGen.initialize(1024);
+    return keyGen.genKeyPair();
+  }
+
+  private static KeyStore createEmptyKeyStore() throws GeneralSecurityException, IOException {
+    KeyStore ks = KeyStore.getInstance("JKS");
+    ks.load(null, null); // initialize
+    return ks;
+  }
+
+  private static void saveKeyStore(KeyStore ks, String filename, String password)
+      throws GeneralSecurityException, IOException {
+    FileOutputStream out = new FileOutputStream(filename);
+    ks.store(out, password.toCharArray());
+    out.close();
+  }
+
+  private static void createKeyStore(String filename, String password, String alias, Key privateKey, Certificate cert)
+      throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    ks.setKeyEntry(alias, privateKey, password.toCharArray(), new Certificate[]{cert});
+    saveKeyStore(ks, filename, password);
+  }
+
+  private static <T extends Certificate> void createTrustStore(String filename, String password, Map<String, T> certs)
+      throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    for (Map.Entry<String, T> cert : certs.entrySet()) {
+      ks.setCertificateEntry(cert.getKey(), cert.getValue());
+    }
+    saveKeyStore(ks, filename, password);
+  }
+
+  /**
+   * Performs complete setup of SSL configuration in preparation for testing an
+   * SSLFactory.  This includes keys, certs, keystore, truststore, the server
+   * SSL configuration file, the client SSL configuration file, and the master
+   * configuration file read by the SSLFactory.
+   *
+   * @param keystoreDir   String directory to save keystore
+   * @param sslConfDir    String directory to save SSL configuration files
+   * @param conf          Configuration master configuration to be used by an SSLFactory,
+   *                      which will be mutated by this method
+   * @param useClientCert boolean true to make the client present a cert in the SSL handshake
+   */
+  public static void setupSSLConfig(String keystoreDir, String sslConfDir, HasConfig conf, boolean useClientCert)
+      throws Exception {
+    setupSSLConfig(keystoreDir, sslConfDir, conf, useClientCert, true, "");
+  }
+
+  /**
+   * Performs complete setup of SSL configuration in preparation for testing an
+   * SSLFactory.  This includes keys, certs, keystore, truststore, the server
+   * SSL configuration file, the client SSL configuration file, and the master
+   * configuration file read by the SSLFactory.
+   *
+   * @param keystoreDir   String directory to save keystore
+   * @param sslConfDir    String directory to save SSL configuration files
+   * @param conf          Configuration master configuration to be used by an SSLFactory,
+   *                      which will be mutated by this method
+   * @param useClientCert boolean true to make the client present a cert in the SSL handshake
+   * @param trustStore    boolean true to create truststore, false not to create it
+   * @param excludeCiphers String comma separated ciphers to exclude
+   * @throws Exception e
+   */
+  private static void setupSSLConfig(String keystoreDir, String sslConfDir, HasConfig conf, boolean useClientCert,
+                                     boolean trustStore, String excludeCiphers) throws Exception {
+    String clientKS = keystoreDir + "/clientKS.jks";
+    String clientPassword = "clientP";
+    String serverKS = keystoreDir + "/serverKS.jks";
+    String serverPassword = "serverP";
+    String trustKS = null;
+    String trustPassword = "trustP";
+
+    File sslClientConfFile = new File(sslConfDir, getClientSSLConfigFileName());
+    File sslServerConfFile = new File(sslConfDir, getServerSSLConfigFileName());
+
+    Map<String, X509Certificate> certs = new HashMap<String, X509Certificate>();
+
+    if (useClientCert) {
+      KeyPair cKP = TestUtil.generateKeyPair("RSA");
+      X509Certificate cCert = TestUtil.generateCertificate("CN=localhost, O=client", cKP, 30, "SHA1withRSA");
+      TestUtil.createKeyStore(clientKS, clientPassword, "client", cKP.getPrivate(), cCert);
+      certs.put("client", cCert);
+    }
+
+    KeyPair sKP = TestUtil.generateKeyPair("RSA");
+    X509Certificate sCert = TestUtil.generateCertificate("CN=localhost, O=server", sKP, 30, "SHA1withRSA");
+    TestUtil.createKeyStore(serverKS, serverPassword, "server", sKP.getPrivate(), sCert);
+    certs.put("server", sCert);
+
+    if (trustStore) {
+      trustKS = keystoreDir + "/trustKS.jks";
+      TestUtil.createTrustStore(trustKS, trustPassword, certs);
+    }
+
+    HasConfig clientSSLConf = createClientSSLConfig(clientKS, clientPassword, clientPassword, trustKS, excludeCiphers);
+    HasConfig serverSSLConf = createServerSSLConfig(serverKS, serverPassword, serverPassword, trustKS, excludeCiphers);
+
+    saveConfig(sslClientConfFile, clientSSLConf);
+    saveConfig(sslServerConfFile, serverSSLConf);
+
+    conf.setString(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
+    conf.setString(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConfFile.getCanonicalPath());
+    conf.setString(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile.getCanonicalPath());
+    conf.setString(WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, sslServerConfFile.getAbsolutePath());
+    conf.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, useClientCert);
+  }
+
+  /**
+   * Create SSL configuration for a client.
+   *
+   * @param clientKS       String client keystore file
+   * @param password       String store password, or null to avoid setting store password
+   * @param keyPassword    String key password, or null to avoid setting key password
+   * @param trustKS        String truststore file
+   * @param excludeCiphers String comma separated ciphers to exclude
+   * @return Configuration for client SSL
+   */
+  private static HasConfig createClientSSLConfig(String clientKS, String password, String keyPassword,
+                                                 String trustKS, String excludeCiphers) {
+    return createSSLConfig(SSLFactory.Mode.CLIENT, clientKS, password, keyPassword, trustKS, excludeCiphers);
+  }
+
+  /**
+   * Creates SSL configuration for a server.
+   *
+   * @param serverKS       String server keystore file
+   * @param password       String store password, or null to avoid setting store password
+   * @param keyPassword    String key password, or null to avoid setting key password
+   * @param trustKS        String truststore file
+   * @param excludeCiphers String comma separated ciphers to exclude
+   * @return HasConfig
+   * @throws IOException e
+   */
+  private static HasConfig createServerSSLConfig(String serverKS, String password, String keyPassword,
+                                                 String trustKS, String excludeCiphers) throws IOException {
+    return createSSLConfig(SSLFactory.Mode.SERVER, serverKS, password, keyPassword, trustKS, excludeCiphers);
+  }
+
+  /**
+   * Returns the client SSL configuration file name.  Under parallel test
+   * execution, this file name is parametrized by a unique ID to ensure that
+   * concurrent tests don't collide on an SSL configuration file.
+   *
+   * @return client SSL configuration file name
+   */
+  private static String getClientSSLConfigFileName() {
+    return getSSLConfigFileName("ssl-client");
+  }
+
+  /**
+   * Returns the server SSL configuration file name.  Under parallel test
+   * execution, this file name is parametrized by a unique ID to ensure that
+   * concurrent tests don't collide on an SSL configuration file.
+   *
+   * @return server SSL configuration file name
+   */
+  private static String getServerSSLConfigFileName() {
+    return getSSLConfigFileName("ssl-server");
+  }
+
+  /**
+   * Returns an SSL configuration file name.  Under parallel test
+   * execution, this file name is parametrized by a unique ID to ensure that
+   * concurrent tests don't collide on an SSL configuration file.
+   *
+   * @param base the base of the file name
+   * @return SSL configuration file name for base
+   */
+  private static String getSSLConfigFileName(String base) {
+    String testUniqueForkId = System.getProperty("test.unique.fork.id");
+    String fileSuffix = testUniqueForkId != null ? "-" + testUniqueForkId : "";
+    return base + fileSuffix + ".xml";
+  }
+
+  /**
+   * Creates SSL configuration.
+   *
+   * @param mode        SSLFactory.Mode mode to configure
+   * @param keystore    String keystore file
+   * @param password    String store password, or null to avoid setting store password
+   * @param keyPassword String key password, or null to avoid setting key password
+   * @param trustKS     String truststore file
+   * @param excludeCiphers String comma separated ciphers to exclude
+   * @return Configuration for SSL
+   */
+  private static HasConfig createSSLConfig(SSLFactory.Mode mode, String keystore, String password,
+                                           String keyPassword, String trustKS, String excludeCiphers) {
+    String trustPassword = "trustP";
+
+    HasConfig sslConf = new HasConfig();
+    if (keystore != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_KEYSTORE_LOCATION_TPL_KEY), keystore);
+    }
+    if (password != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_KEYSTORE_PASSWORD_TPL_KEY), password);
+    }
+    if (keyPassword != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_KEYSTORE_KEYPASSWORD_TPL_KEY),
+          keyPassword);
+    }
+    if (trustKS != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_TRUSTSTORE_LOCATION_TPL_KEY), trustKS);
+    }
+    // trustPassword is a non-null local constant, so no null check is needed.
+    sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_PASSWORD_TPL_KEY),
+        trustPassword);
+    if (null != excludeCiphers && !excludeCiphers.isEmpty()) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_EXCLUDE_CIPHER_LIST),
+          excludeCiphers);
+    }
+    sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY), "1000");
+
+    return sslConf;
+  }
+
+  /**
+   * Saves configuration to a file.
+   *
+   * @param file File to save
+   * @param conf Configuration contents to write to file
+   * @throws IOException if there is an I/O error saving the file
+   */
+  private static void saveConfig(File file, HasConfig conf) throws IOException {
+    try (OutputStream output = new FileOutputStream(file)) {
+      Properties prop = new Properties();
+
+      // Copy the configuration entries into the properties object.
+      for (String name : conf.getNames()) {
+        prop.setProperty(name, conf.getString(name));
+      }
+
+      // Write the properties to the given file, closing the stream either way.
+      prop.store(output, null);
+    }
+  }
+}
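
The property names written by createSSLConfig come from Hadoop's
FileBasedKeyStoresFactory.resolvePropertyName, which expands templates such as
"ssl.{0}.keystore.location" with the mode name. A minimal sketch that prints the
resolved keys, assuming hadoop-common is on the classpath (as it is for these tests):

    import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory;
    import org.apache.hadoop.security.ssl.SSLFactory;

    public class ResolvedSslKeys {
        public static void main(String[] args) {
            // Prints e.g. ssl.client.keystore.location / ssl.server.keystore.location.
            for (SSLFactory.Mode mode : SSLFactory.Mode.values()) {
                System.out.println(FileBasedKeyStoresFactory.resolvePropertyName(
                    mode, FileBasedKeyStoresFactory.SSL_KEYSTORE_LOCATION_TPL_KEY));
                System.out.println(FileBasedKeyStoresFactory.resolvePropertyName(
                    mode, FileBasedKeyStoresFactory.SSL_TRUSTSTORE_LOCATION_TPL_KEY));
            }
        }
    }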

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonConfApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonConfApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonConfApi.java
new file mode 100644
index 0000000..1f7b443
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonConfApi.java
@@ -0,0 +1,83 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.json;
+
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.hadoop.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import java.io.File;
+import java.io.IOException;
+import javax.ws.rs.core.MultivaluedMap;
+
+import static org.junit.Assert.assertEquals;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestJsonConfApi extends TestRestApiBase {
+
+    @Test
+    public void testSetPlugin() {
+        WebResource webResource = getWebResource("conf/setplugin");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("plugin", "RAM");
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("HAS plugin set successfully.\n", response);
+    }
+
+    @Test
+    public void testConfigKdcBackend() throws IOException {
+        WebResource webResource = getWebResource("conf/configkdcbackend");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("backendType", "json");
+        // Let JUnit surface I/O failures instead of swallowing them here.
+        String backend = new File(testDir, "json-backend").getCanonicalPath();
+        params.add("dir", backend);
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("Json backend set successfully.\n", response);
+    }
+
+    @Test
+    public void testConfigXJsonKdc() {
+        WebResource webResource = getWebResource("conf/configkdc");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("realm", "HADOOP.COM");
+        params.add("host", "localhost");
+        params.add("port", "8866");
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("HAS server KDC set successfully.\n", response);
+    }
+
+    @Test
+    public void testGetKrb5Conf() {
+        getKrb5Conf();
+    }
+
+    @Test
+    public void testGetHasConf() {
+        getHasConf();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHadminApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHadminApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHadminApi.java
new file mode 100644
index 0000000..412a8a1
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHadminApi.java
@@ -0,0 +1,80 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.json;
+
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.hadoop.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import javax.ws.rs.core.MultivaluedMap;
+
+import static org.junit.Assert.assertEquals;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestJsonHadminApi extends TestRestApiBase {
+
+    @Test
+    public void testCreatePrincipals() {
+        createPrincipals();
+    }
+
+    @Test
+    public void testExportKeytabs() {
+        exportKeytabs();
+    }
+
+    @Test
+    public void testExportKeytab() {
+        exportKeytab();
+    }
+
+    @Test
+    public void testAddPrincipal() {
+        addPrincipal();
+    }
+
+    @Test
+    public void testGetPrincipals() {
+        getPrincipals();
+    }
+
+    @Test
+    public void testRenamePrincipal() {
+        renamePrincipal();
+    }
+
+    @Test
+    public void testXDeletePrincipal() {
+        deletePrincipal();
+    }
+
+    @Test
+    public void testSetConf() {
+        WebResource webResource = getWebResource("admin/setconf");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("isEnable", "true");
+        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+    }
+}
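
Note that @FixMethodOrder(MethodSorters.NAME_ASCENDING) runs these tests in
lexical method-name order; the X in testXDeletePrincipal is presumably there to
sort deletion after the add/get/rename tests that need the principal to exist.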

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHasApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHasApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHasApi.java
new file mode 100644
index 0000000..bd72448
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHasApi.java
@@ -0,0 +1,54 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.json;
+
+import com.sun.jersey.api.client.WebResource;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import java.io.File;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestJsonHasApi extends TestRestApiBase {
+
+    @Test
+    public void hostRoles() {
+        WebResource webResource = getWebResource("hostroles");
+        String response = webResource.get(String.class);
+        System.out.println(response);
+    }
+
+    @Test
+    public void testKdcStart() {
+        kdcStart();
+        File backendDir = new File(testDir, "json-backend");
+        if (backendDir.exists()) {
+            FileUtil.fullyDelete(backendDir);
+        }
+    }
+
+    @Test
+    public void testKdcInit() {
+        kdcInit();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLConfApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLConfApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLConfApi.java
new file mode 100644
index 0000000..6dc240d
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLConfApi.java
@@ -0,0 +1,70 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.mysql;
+
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.hadoop.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import javax.ws.rs.core.MultivaluedMap;
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestMySQLConfApi extends TestRestApiBase {
+
+    @Test
+    public void testConfigKdcBackend() throws IOException {
+        WebResource webResource = getWebResource("conf/configkdcbackend");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("backendType", "mysql");
+        params.add("driver", "org.h2.Driver");
+        params.add("url", "jdbc:h2:" + testDir.getCanonicalPath() + "/mysql-backend/mysqlbackend;MODE=MySQL");
+        params.add("user", "root");
+        params.add("password", "123456");
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("MySQL backend set successfully.\n", response);
+    }
+
+    @Test
+    public void testConfigMySQLKdc() {
+        WebResource webResource = getWebResource("conf/configkdc");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("realm", "HADOOP.COM");
+        params.add("host", "localhost");
+        params.add("port", "8899");
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("HAS server KDC set successfully.\n", response);
+    }
+
+    @Test
+    public void testGetKrb5Conf() {
+        getKrb5Conf();
+    }
+
+    @Test
+    public void testGetHasConf() {
+        getHasConf();
+    }
+}
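
These MySQL-flavored tests don't need a live MySQL server: the backend URL points
the H2 driver at a local file with MODE=MySQL, so H2 emulates MySQL's dialect. A
sketch of opening the same kind of URL directly, assuming the H2 driver is on the
classpath (the path below is a stand-in for the test directory):

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class H2BackendCheck {
        public static void main(String[] args) throws Exception {
            // Same URL style the test sends to conf/configkdcbackend.
            String url = "jdbc:h2:/tmp/has-test/mysql-backend/mysqlbackend;MODE=MySQL";
            try (Connection conn = DriverManager.getConnection(url, "root", "123456")) {
                System.out.println("Connected: " + !conn.isClosed());
            }
        }
    }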

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHadminApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHadminApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHadminApi.java
new file mode 100644
index 0000000..8adb625
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHadminApi.java
@@ -0,0 +1,64 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.mysql;
+
+import org.apache.hadoop.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestMySQLHadminApi extends TestRestApiBase {
+
+    @Test
+    public void testCreatePrincipals() {
+        createPrincipals();
+    }
+
+    @Test
+    public void testExportKeytabs() {
+        exportKeytabs();
+    }
+
+    @Test
+    public void testExportKeytab() {
+        exportKeytab();
+    }
+
+    @Test
+    public void testAddPrincipal() {
+        addPrincipal();
+    }
+
+    @Test
+    public void testGetPrincipals() {
+        getPrincipals();
+    }
+
+    @Test
+    public void testRenamePrincipal() {
+        renamePrincipal();
+    }
+
+    @Test
+    public void testXDeletePrincipal() {
+        deletePrincipal();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHasApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHasApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHasApi.java
new file mode 100644
index 0000000..f2b6a4a
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHasApi.java
@@ -0,0 +1,46 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.mysql;
+
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import java.io.File;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestMySQLHasApi extends TestRestApiBase {
+
+    @Test
+    public void testKdcStart() {
+        kdcStart();
+        File backendDir = new File(testDir, "mysql-backend");
+        if (backendDir.exists()) {
+            FileUtil.fullyDelete(backendDir);
+        }
+    }
+
+    @Test
+    public void testKdcInit() {
+        kdcInit();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/resources/conf/backend.conf
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/resources/conf/backend.conf b/has/has-server/src/test/resources/conf/backend.conf
new file mode 100644
index 0000000..2085217
--- /dev/null
+++ b/has/has-server/src/test/resources/conf/backend.conf
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+kdc_identity_backend = org.apache.kerby.kerberos.kdc.identitybackend.JsonIdentityBackend
+backend.json.dir = /tmp/test/has/jsonbackend

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/resources/conf/has-server.conf
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/resources/conf/has-server.conf b/has/has-server/src/test/resources/conf/has-server.conf
new file mode 100644
index 0000000..dcea4ad
--- /dev/null
+++ b/has/has-server/src/test/resources/conf/has-server.conf
@@ -0,0 +1,25 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[HAS]
+  https_host = localhost
+  https_port = 8092
+  enable_conf = true
+
+[PLUGIN]
+  auth_type = RAM
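
The auth_type of RAM here matches the plugin name that TestJsonConfApi's
testSetPlugin sends to conf/setplugin above.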

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/resources/conf/kdc.conf
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/resources/conf/kdc.conf b/has/has-server/src/test/resources/conf/kdc.conf
new file mode 100644
index 0000000..a74e180
--- /dev/null
+++ b/has/has-server/src/test/resources/conf/kdc.conf
@@ -0,0 +1,23 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[kdcdefaults]
+  kdc_host = localhost
+  kdc_udp_port = 8866
+  kdc_tcp_port = 8866
+  kdc_realm = HADOOP.COM

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/resources/conf/krb5.conf
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/resources/conf/krb5.conf b/has/has-server/src/test/resources/conf/krb5.conf
new file mode 100644
index 0000000..0f5c367
--- /dev/null
+++ b/has/has-server/src/test/resources/conf/krb5.conf
@@ -0,0 +1,29 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[libdefaults]
+    kdc_realm = HADOOP.COM
+    default_realm = HADOOP.COM
+    udp_preference_limit = 4096
+    kdc_tcp_port = 8866
+    kdc_udp_port = 8866
+
+[realms]
+    HADOOP.COM = {
+        kdc = localhost:8866
+    }
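
The ports are deliberately consistent across these test resources: kdc.conf and
krb5.conf both use 8866, matching the port the JSON conf API test posts to
conf/configkdc above.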

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/resources/webapps/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/resources/webapps/WEB-INF/web.xml b/has/has-server/src/test/resources/webapps/WEB-INF/web.xml
new file mode 100644
index 0000000..b13cb1f
--- /dev/null
+++ b/has/has-server/src/test/resources/webapps/WEB-INF/web.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License. See accompanying LICENSE file.
+-->
+<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
+
+</web-app>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/resources/webapps/has/index.html
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/resources/webapps/has/index.html b/has/has-server/src/test/resources/webapps/has/index.html
new file mode 100644
index 0000000..6f80950
--- /dev/null
+++ b/has/has-server/src/test/resources/webapps/has/index.html
@@ -0,0 +1,24 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<meta http-equiv="REFRESH" charset="UTF-8" />
+<title>HAS Administration</title>
+</head>
+</html>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/pom.xml b/has/has-tool/has-client-tool/pom.xml
new file mode 100644
index 0000000..7ded9e1
--- /dev/null
+++ b/has/has-tool/has-client-tool/pom.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>has-tool</artifactId>
+        <groupId>org.apache.hadoop</groupId>
+        <version>1.0.0-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>has-client-tool</artifactId>
+
+    <dependencies>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>has-client</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.kerby</groupId>
+        <artifactId>kdc-tool</artifactId>
+        <version>${kerby.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>has-plugins</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+    </dependencies>
+
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/HadminRemoteTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/HadminRemoteTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/HadminRemoteTool.java
new file mode 100644
index 0000000..8756420
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/HadminRemoteTool.java
@@ -0,0 +1,164 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HadminRemoteCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteAddPrincipalCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteCreatePrincipalsCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteDeletePrincipalCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteDisableConfCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteEnableConfCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteExportKeytabsCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteGetHostRolesCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteGetPrincipalsCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteRenamePrincipalCmd;
+import org.apache.kerby.util.OSUtil;
+
+import java.io.File;
+import java.util.Scanner;
+
+public class HadminRemoteTool {
+
+    private static final String PROMPT = HadminRemoteTool.class.getSimpleName() + ".remote";
+    private static final String USAGE = (OSUtil.isWindows()
+        ? "Usage: bin\\hadmin-remote.cmd" : "Usage: sh bin/hadmin-remote.sh")
+        + " <conf-file>\n"
+        + "\tExample:\n"
+        + "\t\t"
+        + (OSUtil.isWindows()
+        ? "bin\\hadmin-remote.cmd" : "sh bin/hadmin-remote.sh")
+        + " conf\n";
+
+    private static final String LEGAL_COMMANDS = "Available commands are: "
+        + "\n"
+        + "add_principal, addprinc\n"
+        + "                         Add principal\n"
+        + "delete_principal, delprinc\n"
+        + "                         Delete principal\n"
+        + "rename_principal, renprinc\n"
+        + "                         Rename principal\n"
+        + "list_principals, listprincs\n"
+        + "                         List principals\n"
+        + "get_hostroles, hostroles\n"
+        + "                         Get hostRoles\n"
+        + "export_keytabs, expkeytabs\n"
+        + "                         Export keytabs\n"
+        + "create_principals, creprincs\n"
+        + "                         Create principals\n"
+        + "enable_configure, enable\n"
+        + "                         Enable configure\n"
+        + "disable_configure, disable\n"
+        + "                         Disable configure\n";
+
+    public static void main(String[] args) {
+        HasAdminClient hadmin;
+        HasAuthAdminClient authHasAdminClient = null;
+
+        if (args.length < 1) {
+            System.err.println(USAGE);
+            System.exit(1);
+        }
+
+        String confDirPath = args[0];
+        File confFile = new File(confDirPath, "hadmin.conf");
+        HasConfig hasConfig;
+        try {
+            hasConfig = HasUtil.getHasConfig(confFile);
+        } catch (HasException e) {
+            System.err.println(e.getMessage());
+            return;
+        }
+
+        hadmin = new HasAdminClient(hasConfig);
+
+        if ("kerberos".equals(hasConfig.getFilterAuthType())) {
+            authHasAdminClient = new HasAuthAdminClient(hasConfig);
+        }
+
+        System.out.println("enter \"cmd\" to see legal commands.");
+        System.out.print(PROMPT + ": ");
+
+        try (Scanner scanner = new Scanner(System.in, "UTF-8")) {
+            String input = scanner.nextLine();
+
+            while (!(input.equals("quit") || input.equals("exit") || input.equals("q"))) {
+                try {
+                    execute(hadmin, authHasAdminClient, input);
+                } catch (HasException e) {
+                    System.err.println(e.getMessage());
+                }
+                System.out.print(PROMPT + ": ");
+                input = scanner.nextLine();
+            }
+        }
+    }
+
+    private static void execute(HasAdminClient hadmin, HasAuthAdminClient hasAuthAdminClient,
+                               String input) throws HasException {
+        input = input.trim();
+        if (input.startsWith("cmd")) {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+        HadminRemoteCmd executor;
+
+        String[] items = input.split("\\s+");
+        String cmd = items[0];
+
+        if (cmd.equals("add_principal")
+            || cmd.equals("addprinc")) {
+            executor = new HasRemoteAddPrincipalCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("delete_principal")
+            || cmd.equals("delprinc")) {
+            executor = new HasRemoteDeletePrincipalCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("rename_principal")
+            || cmd.equals("renprinc")) {
+            executor = new HasRemoteRenamePrincipalCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("list_principals")
+            || cmd.equals("listprincs")) {
+            executor = new HasRemoteGetPrincipalsCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("get_hostroles")
+            || cmd.equals("hostroles")) {
+            executor = new HasRemoteGetHostRolesCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("create_principals")
+            || cmd.equals("creprincs")) {
+            executor = new HasRemoteCreatePrincipalsCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("export_keytabs")
+            || cmd.equals("expkeytabs")) {
+            executor = new HasRemoteExportKeytabsCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("enable_configure")
+            || cmd.equals("enable")) {
+            executor = new HasRemoteEnableConfCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("disable_configure")
+            || cmd.equals("disable")) {
+            executor = new HasRemoteDisableConfCmd(hadmin, hasAuthAdminClient);
+        } else {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+        executor.execute(items);
+    }
+
+}
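
Put together, a session with the remote tool looks roughly like this (a sketch;
the conf directory and principal are placeholders):

    $ sh bin/hadmin-remote.sh conf
    Enter "cmd" to see legal commands.
    HadminRemoteTool.remote: addprinc -pw mypassword alice
    HadminRemoteTool.remote: listprincs
    HadminRemoteTool.remote: exit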

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java
new file mode 100644
index 0000000..81f6d98
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java
@@ -0,0 +1,49 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+public abstract class HadminRemoteCmd {
+
+    private HasAdminClient hadmin;
+    private HasAuthAdminClient authHadmin;
+
+    public HadminRemoteCmd(HasAdminClient hadmin, HasAuthAdminClient authHadminClient) {
+        this.hadmin = hadmin;
+        this.authHadmin = authHadminClient;
+    }
+
+    protected HasAdminClient getHadmin() {
+        return hadmin;
+    }
+
+    protected HasAuthAdminClient getAuthHadmin() {
+        return authHadmin;
+    }
+
+    /**
+     * Execute the hadmin cmd.
+     * @param input Input cmd tokens to execute
+     * @throws HasException on execution failure
+     */
+    public abstract void execute(String[] input) throws HasException;
+}
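
Adding a new remote command follows the same pattern everywhere in this patch:
subclass HadminRemoteCmd, prefer the authenticated client when filter auth is
enabled, and register the command name in HadminRemoteTool.execute. A minimal
hypothetical example (the command itself is illustrative, not part of this patch):

    package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;

    import org.apache.hadoop.has.client.HasAdminClient;
    import org.apache.hadoop.has.client.HasAuthAdminClient;
    import org.apache.hadoop.has.common.HasException;

    /**
     * Hypothetical remote cmd that adds a principal with a random key.
     */
    public class HasRemoteAddRandKeyCmd extends HadminRemoteCmd {

        public static final String USAGE = "Usage: add_randkey <principal-name>\n";

        public HasRemoteAddRandKeyCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
            super(hadmin, authHadmin);
        }

        @Override
        public void execute(String[] items) throws HasException {
            if (items.length < 2) {
                System.err.println(USAGE);
                return;
            }
            // Prefer the Kerberos-authenticated client when one was constructed.
            HasAdminClient client = getAuthHadmin() != null ? getAuthHadmin() : getHadmin();
            client.addPrincipal(items[items.length - 1]);
        }
    }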

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java
new file mode 100644
index 0000000..39a24d0
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java
@@ -0,0 +1,70 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+/**
+ * Remote add principal cmd
+ */
+public class HasRemoteAddPrincipalCmd extends HadminRemoteCmd {
+
+    public static final String USAGE = "Usage: add_principal [options] <principal-name>\n"
+        + "\toptions are:\n"
+        + "\t\t[-randkey]\n"
+        + "\t\t[-pw password]"
+        + "\tExample:\n"
+        + "\t\tadd_principal -pw mypassword alice\n";
+
+    public HasRemoteAddPrincipalCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        String clientPrincipal = items[items.length - 1];
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        if (!items[1].startsWith("-")) {
+            hasAdminClient.addPrincipal(clientPrincipal);
+        } else if (items[1].startsWith("-randkey")) {
+            hasAdminClient.addPrincipal(clientPrincipal);
+        } else if (items[1].startsWith("-pw")) {
+            String password = items[2];
+            hasAdminClient.addPrincipal(clientPrincipal, password);
+        } else {
+            System.err.println("add_principal cmd format error.");
+            System.err.println(USAGE);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java
new file mode 100644
index 0000000..aa79e23
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java
@@ -0,0 +1,82 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+
+public class HasRemoteCreatePrincipalsCmd extends HadminRemoteCmd {
+    private static final String USAGE = "\nUsage: create_principals [hostRoles-file]\n"
+            + "\t'hostRoles-file' is a file with a hostRoles json string like:\n"
+            + "\t\t{HOSTS: [ {\"name\":\"host1\",\"hostRoles\":\"HDFS\"}, "
+            + "{\"name\":\"host2\",\"hostRoles\":\"HDFS,HBASE\"} ] }\n"
+            + "\tExample:\n"
+            + "\t\tcreate_principals hostroles.txt\n";
+
+    public HasRemoteCreatePrincipalsCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length != 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        File hostRoles = new File(items[1]);
+        if (!hostRoles.exists()) {
+            System.err.println("HostRoles file does not exist.");
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        // Read the whole hostRoles json file into one string.
+        StringBuilder sb = new StringBuilder();
+        try (BufferedReader reader = new BufferedReader(new FileReader(hostRoles))) {
+            String tempString;
+            while ((tempString = reader.readLine()) != null) {
+                sb.append(tempString);
+            }
+        } catch (FileNotFoundException e) {
+            throw new HasException("File does not exist", e);
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when reading line.", e);
+        }
+        hasAdminClient.requestCreatePrincipals(sb.toString());
+    }
+}
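
The hostRoles file consumed above is read line by line with the line breaks
dropped, so it may be formatted freely. Pretty-printed, the example from USAGE
looks like:

    {HOSTS: [
        {"name": "host1", "hostRoles": "HDFS"},
        {"name": "host2", "hostRoles": "HDFS,HBASE"}
    ]}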

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java
new file mode 100644
index 0000000..260ff2c
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java
@@ -0,0 +1,89 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+import java.io.Console;
+import java.util.Scanner;
+
+/**
+ * Remote delete principal cmd
+ */
+public class HasRemoteDeletePrincipalCmd extends HadminRemoteCmd {
+
+    public static final String USAGE = "Usage: delete_principal <principal-name>\n"
+        + "\tExample:\n"
+        + "\t\tdelete_principal alice\n";
+
+    public HasRemoteDeletePrincipalCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        String principal = items[items.length - 1];
+        String reply;
+        Console console = System.console();
+        String prompt = "Are you sure to delete the principal? (yes/no, YES/NO, y/n, Y/N) ";
+        if (console == null) {
+            System.out.println("Couldn't get Console instance, "
+                + "maybe you're running this from within an IDE. "
+                + "Use scanner to read password.");
+            Scanner scanner = new Scanner(System.in, "UTF-8");
+            reply = getReply(scanner, prompt);
+        } else {
+            reply = getReply(console, prompt);
+        }
+        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
+            hasAdminClient.deletePrincipal(principal);
+        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
+            System.out.println("Principal \"" + principal + "\"  not deleted.");
+        } else {
+            System.err.println("Unknown request, fail to delete the principal.");
+            System.err.println(USAGE);
+        }
+    }
+
+    private String getReply(Scanner scanner, String prompt) {
+        System.out.println(prompt);
+        return scanner.nextLine().trim();
+    }
+
+    private String getReply(Console console, String prompt) {
+        console.printf(prompt);
+        return console.readLine();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java
new file mode 100644
index 0000000..30027b3
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java
@@ -0,0 +1,49 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+/**
+ * Remote disable configure cmd
+ */
+public class HasRemoteDisableConfCmd extends HadminRemoteCmd {
+
+    public static final String USAGE = "Usage: disable_configure\n"
+            + "\tExample:\n"
+            + "\t\tdisable\n";
+
+    public HasRemoteDisableConfCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+        hasAdminClient.setEnableOfConf("false");
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java
new file mode 100644
index 0000000..852d487
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java
@@ -0,0 +1,49 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+/**
+ * Remote enable configure cmd
+ */
+public class HasRemoteEnableConfCmd extends HadminRemoteCmd {
+
+    public static final String USAGE = "Usage: enable_configure\n"
+            + "\tExample:\n"
+            + "\t\tenable\n";
+
+    public HasRemoteEnableConfCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+        hasAdminClient.setEnableOfConf("true");
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java
new file mode 100644
index 0000000..ead3b28
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java
@@ -0,0 +1,58 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+public class HasRemoteExportKeytabsCmd extends HadminRemoteCmd {
+    private static final String USAGE = "\nUsage: export_keytabs <host> [role]\n"
+            + "\tExample:\n"
+            + "\t\texport_keytabs host1 HDFS\n";
+
+    public HasRemoteExportKeytabsCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        //TODO add save path option
+        //String param = items[0];
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        String host = items[1];
+        String role = "";
+        if (items.length >= 3) {
+            role = items[2];
+        }
+        hasAdminClient.getKeytabByHostAndRole(host, role);
+    }
+}
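
The TODO above notes the missing save-path option; for reference, the server side
(LocalHasAdmin.getKeytabByHostAndRole, later in this same commit) writes exported
keytabs to /tmp/<timestamp>/<role>-<host>.keytab, e.g. HDFS-host1.keytab.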

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java
new file mode 100644
index 0000000..70b9ea7
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java
@@ -0,0 +1,68 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+
+public class HasRemoteGetHostRolesCmd extends HadminRemoteCmd {
+    private static final String USAGE = "Usage: get_hostroles\n"
+            + "\tExample:\n"
+            + "\t\tget_hostroles\n";
+
+    public HasRemoteGetHostRolesCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] input) throws HasException {
+        HasAdminClient hasAdminClient = getHadmin();
+        String result = hasAdminClient.getHostRoles();
+
+        if (result != null) {
+            try {
+                JSONArray hostRoles = new JSONArray(result);
+                for (int i = 0; i < hostRoles.length(); i++) {
+                    JSONObject hostRole = hostRoles.getJSONObject(i);
+                    System.out.print("\tHostRole: " + hostRole.getString("HostRole")
+                            + ", PrincipalNames: ");
+                    JSONArray principalNames = hostRole.getJSONArray("PrincipalNames");
+                    for (int j = 0; j < principalNames.length(); j++) {
+                        System.out.print(principalNames.getString(j));
+                        if (j == principalNames.length() - 1) {
+                            System.out.println();
+                        } else {
+                            System.out.print(", ");
+                        }
+                    }
+                }
+            } catch (JSONException e) {
+                throw new HasException("Errors occurred when getting the host roles.", e);
+            }
+        } else {
+            throw new HasException("Could not get hostRoles.");
+        }
+    }
+
+}
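
For reference, the parsing loop above expects a JSON payload shaped as follows; the
field names ("HostRole", "PrincipalNames") come from the code, while the host and
principal values below are illustrative:

    import org.codehaus.jettison.json.JSONArray;

    public class HostRolesPayloadExample {
        public static void main(String[] args) throws Exception {
            // Illustrative stand-in for the response of HasAdminClient.getHostRoles().
            String result = "[{\"HostRole\":\"HDFS\","
                    + "\"PrincipalNames\":[\"hdfs/host1@EXAMPLE.COM\",\"HTTP/host1@EXAMPLE.COM\"]}]";
            JSONArray hostRoles = new JSONArray(result);
            System.out.println(hostRoles.getJSONObject(0).getString("HostRole")); // prints HDFS
        }
    }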

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java
new file mode 100644
index 0000000..05d6970
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java
@@ -0,0 +1,76 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+import java.util.List;
+
+public class HasRemoteGetPrincipalsCmd extends HadminRemoteCmd {
+    private static final String USAGE = "Usage: list_principals [expression]\n"
+            + "\t'expression' is a shell-style glob expression that can contain the wild-card characters ?, *, and [].\n"
+            + "\tExample:\n"
+            + "\t\tlist_principals hdfs*\n";
+
+    public HasRemoteGetPrincipalsCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length > 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        List<String> principalLists = null;
+
+        if (items.length == 1) {
+            try {
+                principalLists = hasAdminClient.getPrincipals();
+            } catch (Exception e) {
+                System.err.println("Errors occurred when getting the principals. " + e.getMessage());
+                return;
+            }
+        } else {
+            // An expression was supplied
+            String exp = items[1];
+            principalLists = hasAdminClient.getPrincipals(exp);
+        }
+
+        if (principalLists == null || principalLists.isEmpty()
+                || principalLists.size() == 1 && principalLists.get(0).isEmpty()) {
+            return;
+        }
+        System.out.println("Principals are listed:");
+        for (String principal : principalLists) {
+            System.out.println(principal);
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java
new file mode 100644
index 0000000..f900f3a
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java
@@ -0,0 +1,91 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+import java.io.Console;
+import java.util.Scanner;
+
+/**
+ * Remote rename principal cmd
+ */
+public class HasRemoteRenamePrincipalCmd extends HadminRemoteCmd {
+    public static final String USAGE = "Usage: rename_principal <old_principal_name>"
+        + " <new_principal_name>\n"
+        + "\tExample:\n"
+        + "\t\trename_principal alice bob\n";
+
+    public HasRemoteRenamePrincipalCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 3) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        String oldPrincipalName = items[items.length - 2];
+        String newPrincipalName = items[items.length - 1];
+
+        String reply;
+        Console console = System.console();
+        String prompt = "Are you sure you want to rename the principal? (yes/no, YES/NO, y/n, Y/N) ";
+        if (console == null) {
+            System.out.println("Couldn't get Console instance, "
+                + "maybe you're running this from within an IDE. "
+                + "Use scanner to read password.");
+            Scanner scanner = new Scanner(System.in, "UTF-8");
+            reply = getReply(scanner, prompt);
+        } else {
+            reply = getReply(console, prompt);
+        }
+        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
+            hasAdminClient.renamePrincipal(oldPrincipalName, newPrincipalName);
+        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
+            System.out.println("Principal \"" + oldPrincipalName + "\"  not renamed.");
+        } else {
+            System.err.println("Unknown request, fail to rename the principal.");
+            System.err.println(USAGE);
+        }
+    }
+
+    private String getReply(Scanner scanner, String prompt) {
+        System.out.println(prompt);
+        return scanner.nextLine().trim();
+    }
+
+    private String getReply(Console console, String prompt) {
+        console.printf(prompt);
+        return console.readLine();
+    }
+}
+}


http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java
new file mode 100644
index 0000000..589e092
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java
@@ -0,0 +1,382 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.admin;
+
+import org.apache.hadoop.has.common.HasAdmin;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.server.HasServer;
+import org.apache.hadoop.has.server.web.HostRoleType;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadmin;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadminImpl;
+import org.apache.kerby.kerberos.kerb.common.KrbUtil;
+import org.apache.kerby.kerberos.kerb.identity.KrbIdentity;
+import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
+import org.apache.kerby.kerberos.kerb.server.KdcConfig;
+import org.apache.kerby.kerberos.kerb.server.KdcSetting;
+import org.apache.kerby.kerberos.kerb.server.KdcUtil;
+import org.apache.kerby.kerberos.kerb.server.ServerSetting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+
+public class LocalHasAdmin implements HasAdmin {
+    public static final Logger LOG = LoggerFactory.getLogger(LocalHasAdmin.class);
+
+    private final ServerSetting serverSetting;
+    private File confDir;
+
+    public LocalHasAdmin(HasServer hasServer) throws KrbException {
+        if (hasServer.getKdcServer() == null) {
+            throw new RuntimeException("Could not get HAS KDC server, please start KDC first.");
+        }
+        this.serverSetting = hasServer.getKdcServer().getKdcSetting();
+    }
+
+    /**
+     * Construct with prepared conf dir.
+     *
+     * @param confDir The path of conf dir
+     * @throws KrbException e
+     */
+    public LocalHasAdmin(File confDir) throws KrbException {
+        this.confDir = confDir;
+        KdcConfig tmpKdcConfig = KdcUtil.getKdcConfig(confDir);
+        if (tmpKdcConfig == null) {
+            tmpKdcConfig = new KdcConfig();
+        }
+
+        BackendConfig tmpBackendConfig = KdcUtil.getBackendConfig(confDir);
+        if (tmpBackendConfig == null) {
+            tmpBackendConfig = new BackendConfig();
+        }
+
+        this.serverSetting = new KdcSetting(tmpKdcConfig, tmpBackendConfig);
+    }
+
+    @Override
+    public List<String> getPrincipals(String exp) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        List<String> princs = null;
+        LOG.info("The value of exp is: " + exp);
+        if (exp == null || exp.isEmpty()) {
+            try {
+                princs = kadmin.getPrincipals();
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        } else {
+            try {
+                princs = kadmin.getPrincipals(exp);
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        }
+        return princs;
+    }
+
+    @Override
+    public void addPrincipal(String principal, String password) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        if (principal == null) {
+            throw new HasException("Value of principal is null.");
+        }
+        if (password == null || password.equals("")) {
+            try {
+                kadmin.addPrincipal(principal);
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        } else {
+            try {
+                kadmin.addPrincipal(principal, password);
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        }
+        LOG.info("Success to add principal :" + principal);
+    }
+
+    @Override
+    public void deletePrincipal(String principal) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        if (principal == null) {
+            throw new IllegalArgumentException("Value of principal is null.");
+        }
+        try {
+            kadmin.deletePrincipal(principal);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        LOG.info("Success to delete principal :" + principal);
+    }
+
+    @Override
+    public void renamePrincipal(String oldPrincipal, String newPrincipal) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            kadmin.renamePrincipal(oldPrincipal, newPrincipal);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        LOG.info("Success to rename principal : \"" + oldPrincipal
+                + "\" to \"" + newPrincipal + "\".");
+    }
+
+    @Override
+    public String addPrincByRole(String host, String role) throws HasException {
+        String result = "";
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        String realm = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
+        String[] princs = HostRoleType.valueOf(role).getPrincs();
+        if (princs == null) {
+            LOG.error("Cannot find the role: " + role);
+            return "Cannot find the role: " + role;
+        }
+        for (String princ : princs) {
+            try {
+                kadmin.addPrincipal(princ + realm);
+                LOG.info("Succeeded in adding principal: " + princ + realm);
+                result = result + "Succeeded in adding principal: " + princ + realm + "\n";
+            } catch (KrbException e) {
+                LOG.info(e.getMessage());
+                result = result + e.getMessage() + "\n";
+            }
+        }
+        return result;
+    }
+
+    @Override
+    public File getKeytabByHostAndRole(String host, String role) throws HasException {
+        LocalKadmin kadmin;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        String realm = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
+        File path = new File("/tmp/" + System.currentTimeMillis());
+        path.mkdirs();
+        File keytab = new File(path, role + "-" + host + ".keytab");
+        if (keytab.exists()) {
+            keytab.delete();
+        }
+        String[] princs = HostRoleType.valueOf(role).getPrincs();
+        for (String princ : princs) {
+            try {
+                if (kadmin.getPrincipal(princ + realm) == null) {
+                    continue;
+                }
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+            try {
+                kadmin.exportKeytab(keytab, princ + realm);
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        }
+        return keytab;
+    }
+
+    public void getKeytabByHostAndRole(String host, String role, File keytab) throws HasException {
+        LocalKadmin kadmin;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        String realm = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
+        if (keytab.exists()) {
+            keytab.delete();
+        }
+        String[] princs = HostRoleType.valueOf(role).getPrincs();
+        for (String princ : princs) {
+            try {
+                if (kadmin.getPrincipal(princ + realm) == null) {
+                    continue;
+                }
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+            try {
+                kadmin.exportKeytab(keytab, princ + realm);
+                System.out.println("Success to export keytab : " + keytab.getAbsolutePath());
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        }
+    }
+
+    @Override
+    public List<String> getPrincipals() throws HasException {
+        LocalKadmin kadmin;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            return kadmin.getPrincipals();
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+    }
+
+    public KrbIdentity getPrincipal(String principalName) throws HasException {
+        LocalKadmin kadmin;
+        KrbIdentity identity;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            identity = kadmin.getPrincipal(principalName);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        return identity;
+    }
+
+    @Override
+    public void addPrincipal(String principal) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        if (principal == null) {
+            throw new HasException("Value of principal is null.");
+        }
+        try {
+            kadmin.addPrincipal(principal);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        LOG.info("Success to add principal :" + principal);
+    }
+
+    @Override
+    public String getHadminPrincipal() {
+        return KrbUtil.makeKadminPrincipal(serverSetting.getKdcRealm()).getName();
+    }
+
+    /**
+     * Get the number of principals.
+     */
+    @Override
+    public int size() throws HasException {
+        return this.getPrincipals().size();
+    }
+
+    @Override
+    public void setEnableOfConf(String isEnable) throws HasException {
+        File hasConf = new File(confDir, "has-server.conf");
+        if (!hasConf.exists()) {
+            System.err.println("has-server.conf does not exist.");
+            return;
+        }
+        try {
+            HasUtil.setEnableConf(hasConf, isEnable);
+        } catch (IOException e) {
+            System.err.println(e.getMessage());
+        }
+    }
+
+    @Override
+    public void exportKeytab(File keytabFile, String principal)
+        throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            kadmin.exportKeytab(keytabFile, principal);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+    }
+
+    @Override
+    public void exportKeytab(File keytabFile, List<String> principals)
+            throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            kadmin.exportKeytab(keytabFile, principals);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+    }
+
+    public void getHostRoles() {
+        for (HostRoleType role : HostRoleType.values()) {
+            System.out.print("\tHostRole: " + role.getName()
+                    + ", PrincipalNames: ");
+            String[] princs = role.getPrincs();
+            for (int j = 0; j < princs.length; j++) {
+                System.out.print(princs[j]);
+                if (j == princs.length - 1) {
+                    System.out.println();
+                } else {
+                    System.out.print(", ");
+                }
+            }
+        }
+    }
+}
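
A minimal usage sketch for this class, assuming a prepared conf dir containing the
kdc and backend config (the /etc/has path, principal name, and password below are
illustrative):

    import java.io.File;
    import org.apache.hadoop.has.server.admin.LocalHasAdmin;

    public class LocalHasAdminExample {
        public static void main(String[] args) throws Exception {
            LocalHasAdmin hadmin = new LocalHasAdmin(new File("/etc/has"));
            hadmin.addPrincipal("alice", "mypassword");
            for (String principal : hadmin.getPrincipals()) {
                System.out.println(principal);
            }
        }
    }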

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java
new file mode 100644
index 0000000..f880c48
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java
@@ -0,0 +1,315 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.kdc;
+
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.server.HasServer;
+import org.apache.kerby.kerberos.kerb.KrbCodec;
+import org.apache.kerby.kerberos.kerb.KrbErrorCode;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.client.KrbContext;
+import org.apache.kerby.kerberos.kerb.common.EncryptionUtil;
+import org.apache.kerby.kerberos.kerb.common.KrbUtil;
+import org.apache.kerby.kerberos.kerb.server.KdcConfigKey;
+import org.apache.kerby.kerberos.kerb.server.KdcContext;
+import org.apache.kerby.kerberos.kerb.server.KdcRecoverableException;
+import org.apache.kerby.kerberos.kerb.server.KdcServer;
+import org.apache.kerby.kerberos.kerb.server.preauth.PreauthHandler;
+import org.apache.kerby.kerberos.kerb.server.request.AsRequest;
+import org.apache.kerby.kerberos.kerb.server.request.KdcRequest;
+import org.apache.kerby.kerberos.kerb.type.KerberosTime;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+import org.apache.kerby.kerberos.kerb.type.base.HostAddress;
+import org.apache.kerby.kerberos.kerb.type.base.HostAddresses;
+import org.apache.kerby.kerberos.kerb.type.base.KrbError;
+import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;
+import org.apache.kerby.kerberos.kerb.type.base.KrbToken;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
+import org.apache.kerby.kerberos.kerb.type.base.TokenFormat;
+import org.apache.kerby.kerberos.kerb.type.kdc.AsReq;
+import org.apache.kerby.kerberos.kerb.type.kdc.KdcOption;
+import org.apache.kerby.kerberos.kerb.type.kdc.KdcOptions;
+import org.apache.kerby.kerberos.kerb.type.kdc.KdcReqBody;
+import org.apache.kerby.kerberos.kerb.type.pa.PaData;
+import org.apache.kerby.kerberos.kerb.type.pa.PaDataEntry;
+import org.apache.kerby.kerberos.kerb.type.pa.PaDataType;
+import org.apache.kerby.kerberos.kerb.type.pa.token.PaTokenRequest;
+import org.apache.kerby.kerberos.kerb.type.pa.token.TokenInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class HasKdcHandler {
+    private static final Logger LOG = LoggerFactory.getLogger(HasKdcHandler.class);
+
+    private KdcContext kdcContext;
+    private KrbContext krbContext;
+    private KdcServer kdcServer;
+
+    /**
+     * Constructor with has server.
+     *
+     * @param hasServer has server
+     */
+    public HasKdcHandler(HasServer hasServer) {
+        this.krbContext = new KrbContext();
+        this.krbContext.init(hasServer.getKrbSetting());
+        this.kdcServer = hasServer.getKdcServer();
+        prepareHandler(kdcServer);
+    }
+
+    public KrbContext getKrbContext() {
+        return krbContext;
+    }
+
+    public KdcContext getKdcContext() {
+        return kdcContext;
+    }
+
+    private KdcServer getKdcServer() {
+        return kdcServer;
+    }
+
+    private void prepareHandler(KdcServer kdcServer) {
+        this.kdcContext = new KdcContext(kdcServer.getKdcSetting());
+        this.kdcContext.setIdentityService(kdcServer.getIdentityService());
+        PreauthHandler preauthHandler = new PreauthHandler();
+        preauthHandler.init();
+        this.kdcContext.setPreauthHandler(preauthHandler);
+    }
+
+    private String getAudience(String name) {
+        return name + "/" + getKdcContext().getKdcRealm() + "@" + getKdcContext().getKdcRealm();
+    }
+
+    public KrbMessage getResponse(AuthToken authToken, String passPhrase) {
+        KrbMessage krbMessage = null;
+        try {
+            krbMessage = handleMessage(authToken, passPhrase);
+        } catch (KrbException e) {
+            LOG.error("Failed to handle message. " + e.getMessage());
+        }
+        return krbMessage;
+    }
+
+    /**
+     * Process the client request message.
+     */
+    public KrbMessage handleMessage(AuthToken authToken, String passPhrase) throws KrbException {
+
+        // set the audiences
+        List<String> auds = new ArrayList<String>();
+        String audience = getAudience("krbtgt");
+        auds.add(audience);
+        authToken.setAudiences(auds);
+
+        AsReq asReq = createAsReq(authToken);
+        KdcRequest kdcRequest = new AsRequest(asReq, kdcContext);
+        kdcRequest.setHttps(true);
+        List<EncryptionType> requestedTypes = getEncryptionTypes();
+        EncryptionType bestType = EncryptionUtil.getBestEncryptionType(requestedTypes,
+                kdcContext.getConfig().getEncryptionTypes());
+
+        if (bestType == null) {
+            LOG.error("Can't get the best encryption type.");
+            throw new KrbException(KrbErrorCode.KDC_ERR_ETYPE_NOSUPP);
+        }
+
+        PrincipalName clientPrincipal = new PrincipalName(authToken.getSubject());
+        String clientRealm = asReq.getReqBody().getRealm();
+        if (clientRealm == null || clientRealm.isEmpty()) {
+            clientRealm = getKdcContext().getKdcRealm();
+        }
+        clientPrincipal.setRealm(clientRealm);
+
+        // Set the client key
+        EncryptionKey clientKey = HasUtil.getClientKey(clientPrincipal.getName(),
+            passPhrase, bestType);
+        kdcRequest.setClientKey(clientKey);
+
+        // Set the token issuers
+        getKdcServer().getKdcConfig().setString(KdcConfigKey.TOKEN_ISSUERS, "has");
+
+        KrbMessage krbResponse;
+
+        try {
+            kdcRequest.process();
+            krbResponse = kdcRequest.getReply();
+        } catch (KrbException e) {
+            LOG.error("Error occurred when request tgt. " + e.getMessage());
+            if (e instanceof KdcRecoverableException) {
+                krbResponse = handleRecoverableException(
+                        (KdcRecoverableException) e, kdcRequest);
+            } else {
+                KrbError krbError = new KrbError();
+                krbError.setStime(KerberosTime.now());
+                krbError.setSusec(100);
+                if (e.getKrbErrorCode() != null) {
+                    krbError.setErrorCode(e.getKrbErrorCode());
+                } else {
+                    krbError.setErrorCode(KrbErrorCode.UNKNOWN_ERR);
+                }
+                krbError.setCrealm(kdcContext.getKdcRealm());
+                if (kdcRequest.getClientPrincipal() != null) {
+                    krbError.setCname(kdcRequest.getClientPrincipal());
+                }
+                krbError.setRealm(kdcContext.getKdcRealm());
+                if (kdcRequest.getServerPrincipal() != null) {
+                    krbError.setSname(kdcRequest.getServerPrincipal());
+                } else {
+                    PrincipalName serverPrincipal = kdcRequest.getKdcReq().getReqBody().getSname();
+                    serverPrincipal.setRealm(kdcRequest.getKdcReq().getReqBody().getRealm());
+                    krbError.setSname(serverPrincipal);
+                }
+                if (KrbErrorCode.KRB_AP_ERR_BAD_INTEGRITY.equals(e.getKrbErrorCode())) {
+                    krbError.setEtext("PREAUTH_FAILED");
+                } else {
+                    krbError.setEtext(e.getMessage());
+                }
+                krbResponse = krbError;
+            }
+        }
+        return krbResponse;
+    }
+
+    /**
+     * Process the recoverable exception.
+     *
+     * @param e The exception return by kdc
+     * @param kdcRequest kdc request
+     * @return The KrbError
+     */
+    private KrbMessage handleRecoverableException(KdcRecoverableException e,
+                                                  KdcRequest kdcRequest)
+            throws KrbException {
+        LOG.info("KRB error occurred while processing request:"
+                + e.getMessage());
+
+        KrbError error = e.getKrbError();
+        error.setStime(KerberosTime.now());
+        error.setSusec(100);
+        error.setErrorCode(e.getKrbError().getErrorCode());
+        error.setRealm(kdcContext.getKdcRealm());
+        if (kdcRequest != null) {
+            error.setSname(kdcRequest.getKdcReq().getReqBody().getCname());
+        } else {
+            error.setSname(new PrincipalName("NONE"));
+        }
+        error.setEtext(e.getMessage());
+        return error;
+    }
+
+    public AsReq createAsReq(AuthToken authToken) throws KrbException {
+        AsReq asReq = new AsReq();
+        KdcReqBody body = makeReqBody();
+        asReq.setReqBody(body);
+
+        PaTokenRequest tokenPa = new PaTokenRequest();
+        KrbToken krbToken = new KrbToken(authToken, TokenFormat.JWT);
+        tokenPa.setToken(krbToken);
+        TokenInfo info = new TokenInfo();
+        info.setTokenVendor(authToken.getIssuer());
+        tokenPa.setTokenInfo(info);
+
+        PaDataEntry paDataEntry = new PaDataEntry();
+        paDataEntry.setPaDataType(PaDataType.TOKEN_REQUEST);
+        paDataEntry.setPaDataValue(KrbCodec.encode(tokenPa));
+
+        PaData paData = new PaData();
+        paData.addElement(paDataEntry);
+        asReq.setPaData(paData);
+        return asReq;
+    }
+
+    /**
+     * Create the KdcReqBody
+     *
+     * @return KdcReqBody
+     *
+     * @throws KrbException e
+     */
+    protected KdcReqBody makeReqBody() throws KrbException {
+        KdcReqBody body = new KdcReqBody();
+
+        long startTime = System.currentTimeMillis();
+        body.setFrom(new KerberosTime(startTime));
+
+        // Set the client principal to null.
+        PrincipalName cName = null;
+        body.setCname(cName);
+
+        body.setRealm(getKrbContext().getKrbSetting().getKdcRealm());
+
+        PrincipalName sName = getServerPrincipal();
+        body.setSname(sName);
+
+        body.setTill(new KerberosTime(startTime + krbContext.getTicketValidTime()));
+
+        int nonce = krbContext.generateNonce();
+        body.setNonce(nonce);
+//        setChosenNonce(nonce);
+
+        body.setKdcOptions(getKdcOptions());
+
+        HostAddresses addresses = getHostAddresses();
+        if (addresses != null) {
+            body.setAddresses(addresses);
+        }
+
+        body.setEtypes(getEncryptionTypes());
+
+        return body;
+    }
+
+    private PrincipalName getServerPrincipal() {
+        return KrbUtil.makeTgsPrincipal(getKrbContext().getKrbSetting().getKdcRealm());
+    }
+
+    private KdcOptions getKdcOptions() {
+        KdcOptions kdcOptions = new KdcOptions();
+        // By default enforce these flags
+        kdcOptions.setFlag(KdcOption.FORWARDABLE);
+        kdcOptions.setFlag(KdcOption.PROXIABLE);
+        kdcOptions.setFlag(KdcOption.RENEWABLE_OK);
+        return kdcOptions;
+    }
+
+    public HostAddresses getHostAddresses() {
+        List<HostAddress> hostAddresses = new ArrayList<HostAddress>();
+        HostAddresses addresses = null;
+        // The local address list is left empty, so null is returned by default.
+        if (!hostAddresses.isEmpty()) {
+            addresses = new HostAddresses();
+            for (HostAddress ha : hostAddresses) {
+                addresses.addElement(ha);
+            }
+        }
+        return addresses;
+    }
+
+    public List<EncryptionType> getEncryptionTypes() {
+        List<EncryptionType> encryptionTypes = krbContext.getConfig().getEncryptionTypes();
+        return EncryptionUtil.orderEtypesByStrength(encryptionTypes);
+    }
+}
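
A sketch of the intended call pattern, assuming the HasServer, validated AuthToken,
and pass phrase are all supplied by the surrounding HAS web layer (none of them are
constructed here):

    import org.apache.hadoop.has.server.HasServer;
    import org.apache.hadoop.has.server.kdc.HasKdcHandler;
    import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
    import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;

    public class HasKdcHandlerExample {
        static KrbMessage issueTgt(HasServer hasServer, AuthToken authToken, String passPhrase) {
            HasKdcHandler handler = new HasKdcHandler(hasServer);
            // The reply is an AS-REP on success, or a KrbError otherwise.
            return handler.getResponse(authToken, passPhrase);
        }
    }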

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java
new file mode 100644
index 0000000..3f397fb
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java
@@ -0,0 +1,52 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.kdc;
+
+import org.apache.kerby.config.ConfigKey;
+
+/**
+ * Define all the MySQL backend related configuration items with default values.
+ */
+public enum MySQLConfKey implements ConfigKey {
+    MYSQL_DRIVER("com.mysql.jdbc.Driver"),
+    MYSQL_URL("jdbc:mysql://127.0.0.1:3306/mysqlbackend"),
+    MYSQL_USER("root"),
+    MYSQL_PASSWORD("passwd");
+
+    private Object defaultValue;
+
+    MySQLConfKey() {
+        this.defaultValue = null;
+    }
+
+    MySQLConfKey(Object defaultValue) {
+        this.defaultValue = defaultValue;
+    }
+
+    @Override
+    public String getPropertyKey() {
+        return name().toLowerCase();
+    }
+
+    @Override
+    public Object getDefaultValue() {
+        return this.defaultValue;
+    }
+}
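
Since getPropertyKey() is simply name().toLowerCase(), the configuration property
names are mysql_driver, mysql_url, mysql_user and mysql_password. A tiny sketch:

    import org.apache.hadoop.has.server.kdc.MySQLConfKey;

    public class MySQLConfKeyExample {
        public static void main(String[] args) {
            // Prints: mysql_url = jdbc:mysql://127.0.0.1:3306/mysqlbackend
            System.out.println(MySQLConfKey.MYSQL_URL.getPropertyKey()
                    + " = " + MySQLConfKey.MYSQL_URL.getDefaultValue());
        }
    }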

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java
new file mode 100644
index 0000000..034704a
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java
@@ -0,0 +1,426 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.kdc;
+
+import org.apache.commons.dbutils.DbUtils;
+import org.apache.directory.api.util.GeneralizedTime;
+import org.apache.kerby.config.Config;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.identity.KrbIdentity;
+import org.apache.kerby.kerberos.kerb.identity.backend.AbstractIdentityBackend;
+import org.apache.kerby.kerberos.kerb.type.KerberosTime;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.ResultSet;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import javax.sql.rowset.serial.SerialBlob;
+import java.util.Map;
+import java.util.List;
+import java.util.ArrayList;
+import java.text.ParseException;
+
+/**
+ * A MySQL based backend implementation.
+ */
+public class MySQLIdentityBackend extends AbstractIdentityBackend {
+    private Connection connection;
+    private String driver;
+    private String url;
+    private String user;
+    private String password;
+    private static final Logger LOG = LoggerFactory.getLogger(MySQLIdentityBackend.class);
+    private String identityTable;
+    private String keyInfoTable;
+
+    /**
+     * Construct an instance using the specified config, which contains
+     * everything needed to initialize a MySQL backend.
+     * @param config The config used to configure the backend.
+     */
+    public MySQLIdentityBackend(final Config config) {
+        setConfig(config);
+    }
+
+    public MySQLIdentityBackend() { }
+
+    /**
+     * Start the MySQL connection.
+     */
+    private void startConnection() throws KrbException {
+        try {
+            Class.forName(driver);
+            connection = DriverManager.getConnection(url, user, password);
+            if (!connection.isClosed()) {
+                LOG.info("Succeeded in connecting to MySQL.");
+            }
+        } catch (ClassNotFoundException e) {
+            throw new KrbException("JDBC Driver Class not found. ", e);
+        } catch (SQLException e) {
+            throw new KrbException("Failed to connecting to MySQL. ", e);
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected void doInitialize() throws KrbException {
+        LOG.info("Initializing the MySQL identity backend.");
+        driver = getConfig().getString(MySQLConfKey.MYSQL_DRIVER, true);
+        user = getConfig().getString(MySQLConfKey.MYSQL_USER, true);
+        password = getConfig().getString(MySQLConfKey.MYSQL_PASSWORD, true);
+
+        String urlString = getConfig().getString(MySQLConfKey.MYSQL_URL, true);
+        if (urlString == null || urlString.isEmpty()) {
+            urlString = getBackendConfig().getString(MySQLConfKey.MYSQL_URL, true);
+        }
+        url = urlString;
+
+        PreparedStatement preInitialize = null;
+        PreparedStatement preKdcRealm = null;
+        ResultSet resKdcRealm = null;
+        PreparedStatement preIdentity = null;
+        PreparedStatement preKey = null;
+        try {
+            startConnection();
+
+            // Set initialized for kdc config
+            String stmInitialize = "UPDATE `kdc_config` SET initialized = true WHERE id = 1";
+            preInitialize = connection.prepareStatement(stmInitialize);
+            preInitialize.executeUpdate();
+
+            // Get identity table name according to realm of kdc
+            String stmKdcRealm = "SELECT realm FROM `kdc_config`";
+            preKdcRealm = connection.prepareStatement(stmKdcRealm);
+            resKdcRealm = preKdcRealm.executeQuery();
+            if (resKdcRealm.next()) {
+                String realm = resKdcRealm.getString("realm").toLowerCase();
+                identityTable = "`" + realm + "_identity" + "`";
+                keyInfoTable = "`" + realm + "_key" + "`";
+            } else {
+                throw new KrbException("Failed to get kdc config.");
+            }
+
+            // Create identity table
+            String stmIdentity = "CREATE TABLE IF NOT EXISTS " + identityTable
+                + " (principal varchar(255) NOT NULL, key_version INTEGER "
+                + "DEFAULT 1, kdc_flags INTEGER DEFAULT 0, disabled bool "
+                + "DEFAULT NULL, locked bool DEFAULT NULL, expire_time "
+                + "VARCHAR(255) DEFAULT NULL, created_time VARCHAR(255) "
+                + "DEFAULT NULL, PRIMARY KEY (principal) ) ENGINE=INNODB;";
+            preIdentity = connection.prepareStatement(stmIdentity);
+            preIdentity.executeUpdate();
+
+            // Create key table
+            String stmKey = "CREATE TABLE IF NOT EXISTS " + keyInfoTable
+                + " (key_id INTEGER NOT NULL AUTO_INCREMENT, key_type "
+                + "VARCHAR(255) DEFAULT NULL, kvno INTEGER DEFAULT -1, "
+                + "key_value BLOB DEFAULT NULL, principal VARCHAR(255) NOT NULL,"
+                + "PRIMARY KEY (key_id), INDEX (principal), FOREIGN KEY "
+                + "(principal) REFERENCES " + identityTable + "(principal) "
+                + ") ENGINE=INNODB;";
+            preKey = connection.prepareStatement(stmKey);
+            preKey.executeUpdate();
+
+        } catch (SQLException e) {
+            LOG.error("Error occurred while initialize MySQL backend." + e.toString());
+            throw new KrbException("Failed to create table in database. ", e);
+        } finally {
+            DbUtils.closeQuietly(preInitialize);
+            DbUtils.closeQuietly(preKdcRealm);
+            DbUtils.closeQuietly(resKdcRealm);
+            DbUtils.closeQuietly(preIdentity);
+            DbUtils.closeQuietly(preKey);
+            DbUtils.closeQuietly(connection);
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected void doStop() throws KrbException {
+        try {
+            closeConnection();
+            if (connection.isClosed()) {
+                LOG.info("Succeeded in closing connection with MySQL.");
+            }
+        } catch (SQLException e) {
+            LOG.error("Failed to close connection with MySQL.");
+            throw new KrbException("Failed to close connection with MySQL. ", e);
+        }
+    }
+
+    /**
+     * Close the connection for stop().
+     * @throws SQLException if SQLException handled
+     */
+    private void closeConnection() throws SQLException {
+        if (!connection.isClosed()) {
+            connection.close();
+        }
+    }
+
+    /**
+     * Convert a KerberosTime type object to a generalized time form of String.
+     * @param kerberosTime The kerberos time to convert
+     */
+    private String toGeneralizedTime(final KerberosTime kerberosTime) {
+        GeneralizedTime generalizedTime = new GeneralizedTime(kerberosTime.getValue());
+        return generalizedTime.toString();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected KrbIdentity doAddIdentity(KrbIdentity identity) throws KrbException {
+        String principalName = identity.getPrincipalName();
+        int keyVersion = identity.getKeyVersion();
+        int kdcFlags = identity.getKdcFlags();
+        boolean disabled = identity.isDisabled();
+        boolean locked = identity.isLocked();
+        String createdTime = toGeneralizedTime(identity.getCreatedTime());
+        String expireTime = toGeneralizedTime(identity.getExpireTime());
+        Map<EncryptionType, EncryptionKey> keys = identity.getKeys();
+
+        PreparedStatement preIdentity = null;
+        PreparedStatement preKey = null;
+
+        KrbIdentity duplicateIdentity = doGetIdentity(principalName);
+        if (duplicateIdentity != null) {
+            LOG.warn("The identity maybe duplicate.");
+
+            return duplicateIdentity;
+        } else {
+            try {
+                startConnection();
+                connection.setAutoCommit(false);
+
+                // Insert identity to identity table
+                String stmIdentity = "insert into " + identityTable + " values(?, ?, ?, ?, ?, ?, ?)";
+                preIdentity = connection.prepareStatement(stmIdentity);
+                preIdentity.setString(1, principalName);
+                preIdentity.setInt(2, keyVersion);
+                preIdentity.setInt(3, kdcFlags);
+                preIdentity.setBoolean(4, disabled);
+                preIdentity.setBoolean(5, locked);
+                preIdentity.setString(6, createdTime);
+                preIdentity.setString(7, expireTime);
+                preIdentity.executeUpdate();
+
+                // Insert keys to key table
+                for (Map.Entry<EncryptionType, EncryptionKey> entry : keys.entrySet()) {
+                    String stmKey = "insert into " + keyInfoTable + " (key_type, kvno, key_value, principal)"
+                        + " values(?, ?, ?, ?)";
+                    preKey = connection.prepareStatement(stmKey);
+                    preKey.setString(1, entry.getKey().getName());
+                    preKey.setInt(2, entry.getValue().getKvno());
+                    preKey.setBlob(3, new SerialBlob(entry.getValue().getKeyData()));
+                    preKey.setString(4, principalName);
+                    preKey.executeUpdate();
+                }
+
+                connection.commit();
+                return identity;
+            } catch (SQLException e) {
+                try {
+                    LOG.info("Transaction is being rolled back.");
+                    connection.rollback();
+                } catch (SQLException ex) {
+                    throw new KrbException("Transaction roll back failed. ", ex);
+                }
+                LOG.error("Error occurred while adding identity.");
+                throw new KrbException("Failed to add identity. ", e);
+            } finally {
+                DbUtils.closeQuietly(preIdentity);
+                DbUtils.closeQuietly(preKey);
+                doStop();
+            }
+        }
+    }
+
+    /**
+     * Create kerberos time.
+     * @param generalizedTime generalized time
+     * @throws ParseException parse exception
+     */
+    private KerberosTime createKerberosTime(final String generalizedTime) throws ParseException {
+        long time = new GeneralizedTime(generalizedTime).getTime();
+        return new KerberosTime(time);
+    }
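+
+    // Illustration: the two helpers above round-trip a timestamp through the
+    // generalized-time string stored in the VARCHAR time columns, e.g.
+    //   KerberosTime now = new KerberosTime(System.currentTimeMillis());
+    //   String stored = new GeneralizedTime(now.getValue()).toString();
+    //   KerberosTime restored = createKerberosTime(stored);  // equal to 'now', up to sub-second precision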
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected KrbIdentity doGetIdentity(final String principalName) throws KrbException {
+        KrbIdentity krbIdentity = new KrbIdentity(principalName);
+
+        PreparedStatement preIdentity = null;
+        ResultSet resIdentity = null;
+        PreparedStatement preKey = null;
+        ResultSet resKey = null;
+        try {
+            startConnection();
+
+            // Get identity from identity table
+            String stmIdentity = "SELECT * FROM " + identityTable + " where principal = ?";
+            preIdentity = connection.prepareStatement(stmIdentity);
+            preIdentity.setString(1, principalName);
+            resIdentity = preIdentity.executeQuery();
+
+            if (!resIdentity.next()) {
+                return null;
+            }
+
+            // next() above positioned the cursor on the matching row; read it
+            // directly instead of advancing the cursor again.
+            krbIdentity.setKeyVersion(resIdentity.getInt("key_version"));
+            krbIdentity.setKdcFlags(resIdentity.getInt("kdc_flags"));
+            krbIdentity.setDisabled(resIdentity.getBoolean("disabled"));
+            krbIdentity.setLocked(resIdentity.getBoolean("locked"));
+            krbIdentity.setCreatedTime(createKerberosTime(resIdentity.getString("created_time")));
+            krbIdentity.setExpireTime(createKerberosTime(resIdentity.getString("expire_time")));
+
+            // Get keys from key table
+            List<EncryptionKey> keys = new ArrayList<>();
+            String stmKey = "SELECT * FROM " + keyInfoTable + " where principal = ?";
+            preKey = connection.prepareStatement(stmKey);
+            preKey.setString(1, principalName);
+            resKey = preKey.executeQuery();
+            while (resKey.next()) {
+                int kvno = resKey.getInt("kvno");
+                String keyType = resKey.getString("key_type");
+                EncryptionType eType = EncryptionType.fromName(keyType);
+                byte[] keyValue = resKey.getBytes("key_value");
+                EncryptionKey key = new EncryptionKey(eType, keyValue, kvno);
+                keys.add(key);
+            }
+
+            krbIdentity.addKeys(keys);
+            return krbIdentity;
+        } catch (SQLException e) {
+            LOG.error("Error occurred while getting identity.");
+            throw new KrbException("Failed to get identity. ", e);
+        } catch (ParseException e) {
+            throw new KrbException("Failed to get identity. ", e);
+        } finally {
+            DbUtils.closeQuietly(preIdentity);
+            DbUtils.closeQuietly(resIdentity);
+            DbUtils.closeQuietly(preKey);
+            DbUtils.closeQuietly(resKey);
+            doStop();
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected KrbIdentity doUpdateIdentity(KrbIdentity identity) throws KrbException {
+        String principalName = identity.getPrincipalName();
+        try {
+            doDeleteIdentity(principalName); // Delete former identity
+            doAddIdentity(identity); // Insert new identity
+        } catch (KrbException e) {
+            LOG.error("Error occurred while updating identity: " + principalName);
+            throw new KrbException("Failed to update identity. ", e);
+        }
+
+        return getIdentity(principalName);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected void doDeleteIdentity(String principalName) throws KrbException {
+        PreparedStatement preKey = null;
+        PreparedStatement preIdentity = null;
+        try {
+            startConnection();
+            connection.setAutoCommit(false);
+
+            // Delete keys from key table
+            String stmKey = "DELETE FROM " + keyInfoTable + " where principal = ?";
+            preKey = connection.prepareStatement(stmKey);
+            preKey.setString(1, principalName);
+            preKey.executeUpdate();
+
+            // Delete identity from identity table
+            String stmIdentity = "DELETE FROM " + identityTable + " where principal = ? ";
+            preIdentity = connection.prepareStatement(stmIdentity);
+            preIdentity.setString(1, principalName);
+            preIdentity.executeUpdate();
+
+            connection.commit();
+        } catch (SQLException e) {
+            try {
+                LOG.info("Transaction is being rolled back.");
+                connection.rollback();
+            } catch (SQLException ex) {
+                throw new KrbException("Transaction roll back failed. ", ex);
+            }
+            LOG.error("Error occurred while deleting identity.");
+            throw new KrbException("Failed to delete identity. ", e);
+        } finally {
+            DbUtils.closeQuietly(preIdentity);
+            DbUtils.closeQuietly(preKey);
+            doStop();
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected Iterable<String> doGetIdentities() throws KrbException {
+        List<String> identityNames = new ArrayList<>();
+        PreparedStatement preSmt = null;
+        ResultSet result = null;
+        try {
+            startConnection();
+            String statement = "SELECT * FROM " + identityTable;
+            preSmt = connection.prepareStatement(statement);
+            result = preSmt.executeQuery();
+            while (result.next()) {
+                identityNames.add(result.getString("principal"));
+            }
+        } catch (SQLException e) {
+            LOG.error("Error occurred while getting identities.");
+            throw new KrbException("Failed to get identities. ", e);
+        } finally {
+            DbUtils.closeQuietly(preSmt);
+            DbUtils.closeQuietly(result);
+            doStop();
+        }
+
+        return identityNames;
+    }
+}

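The statements above read from and delete from two tables named by the
identityTable and keyInfoTable fields. The DDL is not part of this file; the
following is only a sketch, with hypothetical table names, of a schema the
queries would be compatible with (the JDBC URL and credentials reuse the
defaults from ConfApi further below):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class MySQLBackendSchemaSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical URL/credentials; the real backend reads mysql_url,
            // mysql_user and mysql_password from backend.conf.
            try (Connection conn = DriverManager.getConnection(
                    "jdbc:mysql://127.0.0.1:3306/mysqlbackend", "root", "passwd");
                 Statement stmt = conn.createStatement()) {
                // One row per principal, matching the columns read in doGetIdentity().
                stmt.executeUpdate("CREATE TABLE IF NOT EXISTS kerby_identity ("
                    + "principal VARCHAR(255) NOT NULL PRIMARY KEY, "
                    + "key_version INTEGER, kdc_flags INTEGER, "
                    + "disabled BOOLEAN, locked BOOLEAN, "
                    + "created_time VARCHAR(255), expire_time VARCHAR(255))");
                // One row per (principal, key type), matching the key query above.
                stmt.executeUpdate("CREATE TABLE IF NOT EXISTS kerby_key ("
                    + "key_type VARCHAR(255), kvno INTEGER, "
                    + "key_value BLOB, principal VARCHAR(255) NOT NULL)");
            }
        }
    }
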
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java
new file mode 100644
index 0000000..78ce1e9
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web;
+
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.server.HasServer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import java.io.File;
+import java.io.IOException;
+@Private
+@Unstable
+public class ConfFilter implements Filter {
+    public static final Logger LOG = LoggerFactory.getLogger(ConfFilter.class);
+    @Override
+    public void init(FilterConfig filterConfig) throws ServletException {
+
+    }
+
+    @Override
+    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse,
+                         FilterChain filterChain) throws IOException, ServletException {
+
+        final HasServer hasServer = WebServer.getHasServerFromContext(
+                servletRequest.getServletContext());
+        HasConfig hasConfig;
+        try {
+            hasConfig = HasUtil.getHasConfig(
+                    new File(hasServer.getConfDir(), "has-server.conf"));
+            String isEnableConf = hasConfig.getEnableConf();
+            if (!"true".equals(isEnableConf)) {
+                throw new ServletException("The KDC has started and the conf API is disabled.");
+            }
+            filterChain.doFilter(servletRequest, servletResponse);
+        } catch (HasException e) {
+            LOG.error("Failed to load has-server.conf. " + e.getMessage());
+            throw new ServletException(e);
+        }
+    }
+
+    @Override
+    public void destroy() {
+
+    }
+}
\ No newline at end of file

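ConfFilter gates every conf request on the enable_conf switch in
has-server.conf; the hadmin enable_configure/disable_configure commands
elsewhere in this commit flip that switch. A standalone sketch of the same
check (the conf dir path is an assumption; the server resolves it via
HasServer.getConfDir()):

    import java.io.File;

    import org.apache.hadoop.has.common.HasConfig;
    import org.apache.hadoop.has.common.util.HasUtil;

    public class ConfGateCheck {
        public static void main(String[] args) throws Exception {
            // Hypothetical conf dir holding has-server.conf.
            HasConfig hasConfig = HasUtil.getHasConfig(
                    new File("/etc/has", "has-server.conf"));
            boolean confEnabled = hasConfig != null
                    && "true".equals(hasConfig.getEnableConf());
            System.out.println("conf API enabled: " + confEnabled);
        }
    }
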
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java
new file mode 100644
index 0000000..82bb129
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public enum HostRoleType {
+    HDFS("HDFS", new String[]{"HTTP", "hdfs"}),
+    YARN("YARN", new String[]{"yarn"}),
+    MAPRED("MAPRED", new String[]{"mapred"}),
+    HBASE("HBASE", new String[]{"hbase"}),
+    ZOOKEEPER("ZOOKEEPER", new String[]{"zookeeper"}),
+    SPARK("SPARK", new String[]{"spark"}),
+    HIVE("HIVE", new String[]{"hive"}),
+    OOZIE("OOZIE", new String[]{"oozie"}),
+    HUE("HUE", new String[]{"hue"});
+
+    private final String name;
+    private final String[] princs;
+
+    HostRoleType(String name, String[] princs) {
+        this.name = name;
+        this.princs = princs;
+    }
+
+    public String[] getPrincs() {
+        return princs;
+    }
+
+    public String getName() {
+        return name;
+    }
+}

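Each HostRoleType maps a cluster role to the short principal names expected
for a host running that role. A sketch of enumerating them (the
princ/host@REALM form plus the host and realm values are assumptions; the
real principals are created by LocalHasAdmin.addPrincByRole):

    import org.apache.hadoop.has.server.web.HostRoleType;

    public class ListRolePrincipals {
        public static void main(String[] args) {
            String host = "host1";          // hypothetical host name
            String realm = "HADOOP.COM";    // hypothetical realm
            for (HostRoleType role : HostRoleType.values()) {
                for (String princ : role.getPrincs()) {
                    // e.g. HDFS -> HTTP/host1@HADOOP.COM and hdfs/host1@HADOOP.COM
                    System.out.println(role.getName() + ": "
                            + princ + "/" + host + "@" + realm);
                }
            }
        }
    }
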
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java
new file mode 100644
index 0000000..bd0a1ca
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.has.server.web;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+
+/**
+ * This class contains constants for configuration keys and default values
+ * used by the HAS web server.
+ */
+@InterfaceAudience.Private
+public class WebConfigKey {
+
+  public static final int HAS_HTTP_PORT_DEFAULT = 9870;
+  public static final String HAS_HTTP_HOST_DEFAULT = "0.0.0.0";
+  public static final String HAS_HTTP_ADDRESS_KEY = "has.http-address";
+  public static final String HAS_HTTP_ADDRESS_DEFAULT = HAS_HTTP_HOST_DEFAULT + ":" + HAS_HTTP_PORT_DEFAULT;
+
+  public static final String HAS_HTTPS_BIND_HOST_KEY = "has.https-bind-host";
+  public static final int HAS_HTTPS_PORT_DEFAULT = 9871;
+  public static final String HAS_HTTPS_HOST_DEFAULT = "0.0.0.0";
+  public static final String HAS_HTTPS_ADDRESS_KEY = "has.https-address";
+  public static final String HAS_HTTPS_ADDRESS_DEFAULT = HAS_HTTPS_HOST_DEFAULT + ":" + HAS_HTTPS_PORT_DEFAULT;
+  public static final String HAS_HTTP_POLICY_KEY = "has.http.policy";
+  public static final String HAS_HTTP_POLICY_DEFAULT = HttpConfig.Policy.HTTPS_ONLY.name();
+
+  public static final String HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY = "has.https.server.keystore.resource";
+  public static final String HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT = "ssl-server.xml";
+  public static final String HAS_SERVER_HTTPS_KEYPASSWORD_KEY = "ssl.server.keystore.keypassword";
+  public static final String HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY = "ssl.server.keystore.password";
+  public static final String HAS_SERVER_HTTPS_KEYSTORE_LOCATION_KEY = "ssl.server.keystore.location";
+  public static final String HAS_SERVER_HTTPS_TRUSTSTORE_LOCATION_KEY = "ssl.server.truststore.location";
+  public static final String HAS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY = "ssl.server.truststore.password";
+  public static final String HAS_CLIENT_HTTPS_NEED_AUTH_KEY = "has.client.https.need-auth";
+  public static final boolean HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT = false;
+
+  public static final String HAS_AUTHENTICATION_FILTER_KEY = "has.web.authentication.filter";
+  public static final String HAS_AUTHENTICATION_FILTER_DEFAULT = AuthenticationFilter.class.getName();
+
+  public static final String HAS_AUTHENTICATION_FILTER_AUTH_TYPE = "has.authentication.filter.auth.type";
+  public static final String HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY = "has.authentication.kerberos.principal";
+  public static final String HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY = "has.authentication.kerberos.keytab";
+  public static final String HAS_AUTHENTICATION_KERBEROS_NAME_RULES = "has.authentication.kerberos.name.rules";
+}

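These keys are read and written through HasConfig in WebServer below. A
sketch of setting a few of them programmatically (the values shown simply
make the shipped defaults explicit):

    import org.apache.hadoop.has.common.HasConfig;
    import org.apache.hadoop.has.server.web.WebConfigKey;

    public class WebConfigSketch {
        public static void main(String[] args) {
            HasConfig conf = new HasConfig();
            // HTTPS_ONLY is already the default policy; set it explicitly.
            conf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY, "HTTPS_ONLY");
            // Bind the HTTPS endpoint to a specific address and port.
            conf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY, "0.0.0.0:9871");
            conf.setString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE, "kerberos");
            System.out.println(conf.getString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY));
        }
    }
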
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java
new file mode 100644
index 0000000..3e5f832
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java
@@ -0,0 +1,348 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.has.server.web;
+
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.HasServer;
+import org.apache.hadoop.has.server.web.rest.HasApi;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.ServletContext;
+import java.io.File;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.Map;
+
+public class WebServer {
+    public static final Logger LOG = LoggerFactory.getLogger(WebServer.class);
+
+    private HttpServer2 httpServer;
+    private final HasConfig conf;
+
+    private InetSocketAddress httpAddress;
+    private InetSocketAddress httpsAddress;
+
+    protected static final String HAS_SERVER_ATTRIBUTE_KEY = "hasserver";
+
+    public WebServer(HasConfig conf) {
+        this.conf = conf;
+    }
+
+    public HasConfig getConf() {
+        return conf;
+    }
+
+    private void init() {
+
+        final String pathSpec = "/has/v1/*";
+
+        // add has packages
+        httpServer.addJerseyResourcePackage(HasApi.class
+                .getPackage().getName(),
+            pathSpec);
+    }
+
+    public void defineFilter() {
+        String authType = conf.getString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE);
+        if ("kerberos".equals(authType)) {
+            // Add the authentication filter for the HAS admin endpoints.
+            final String className = conf.getString(
+                WebConfigKey.HAS_AUTHENTICATION_FILTER_KEY,
+                WebConfigKey.HAS_AUTHENTICATION_FILTER_DEFAULT);
+
+            Map<String, String> params = getAuthFilterParams(conf);
+
+            String adminPathSpec = "/has/v1/admin/*";
+            HttpServer2.defineFilter(httpServer.getWebAppContext(), className, className,
+                params, new String[]{adminPathSpec});
+            HttpServer2.LOG.info("Added filter '" + className + "' (class=" + className
+                + ")");
+        }
+    }
+
+    public void defineConfFilter() {
+        String confFilterName = ConfFilter.class.getName();
+        String confPath = "/has/v1/conf/*";
+        HttpServer2.defineFilter(httpServer.getWebAppContext(), confFilterName, confFilterName,
+                getAuthFilterParams(conf), new String[]{confPath});
+        HttpServer2.LOG.info("Added filter '" + confFilterName + "' (class=" + confFilterName
+                + ")");
+    }
+
+    private Map<String, String> getAuthFilterParams(HasConfig conf) {
+        Map<String, String> params = new HashMap<String, String>();
+
+        String authType = conf.getString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE);
+        if (authType != null && !authType.isEmpty()) {
+            params.put(AuthenticationFilter.AUTH_TYPE, authType);
+        }
+        String principal = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
+        if (principal != null && !principal.isEmpty()) {
+            try {
+                principal = SecurityUtil.getServerPrincipal(principal,
+                    getHttpsAddress().getHostName());
+            } catch (IOException e) {
+                LOG.warn("Errors occurred when get server principal. " + e.getMessage());
+            }
+            params.put(KerberosAuthenticationHandler.PRINCIPAL, principal);
+        }
+        String keytab = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
+        if (keytab != null && !keytab.isEmpty()) {
+            params.put(KerberosAuthenticationHandler.KEYTAB, keytab);
+        }
+        String rule = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_NAME_RULES);
+        if (rule != null && !rule.isEmpty()) {
+            params.put(KerberosAuthenticationHandler.NAME_RULES, rule);
+        } else {
+            params.put(KerberosAuthenticationHandler.NAME_RULES, "DEFAULT");
+        }
+        return params;
+    }
+
+    public InetSocketAddress getBindAddress() {
+        if (httpAddress != null) {
+            return httpAddress;
+        } else if (httpsAddress != null) {
+            return httpsAddress;
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Start the web server. The configured HTTP policy decides whether
+     * the HTTP endpoint, the HTTPS endpoint, or both are created.
+     */
+    public void start() throws HasException {
+
+        HttpConfig.Policy policy = getHttpPolicy(conf);
+
+        final String bindHost =
+            conf.getString(WebConfigKey.HAS_HTTPS_BIND_HOST_KEY);
+        InetSocketAddress httpAddr = null;
+        if (policy.isHttpEnabled()) {
+            final String httpAddrString = conf.getString(
+                WebConfigKey.HAS_HTTP_ADDRESS_KEY,
+                WebConfigKey.HAS_HTTP_ADDRESS_DEFAULT);
+            httpAddr = NetUtils.createSocketAddr(httpAddrString);
+            if (bindHost != null && !bindHost.isEmpty()) {
+                httpAddr = new InetSocketAddress(bindHost, httpAddr.getPort());
+            }
+            LOG.info("Get the http address: " + httpAddr);
+        }
+
+        InetSocketAddress httpsAddr = null;
+        if (policy.isHttpsEnabled()) {
+            final String httpsAddrString = conf.getString(
+                WebConfigKey.HAS_HTTPS_ADDRESS_KEY,
+                WebConfigKey.HAS_HTTPS_ADDRESS_DEFAULT);
+            httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
+
+            if (bindHost != null && !bindHost.isEmpty()) {
+                httpsAddr = new InetSocketAddress(bindHost, httpsAddr.getPort());
+            }
+            LOG.info("Get the https address: " + httpsAddr);
+        }
+
+        HttpServer2.Builder builder = httpServerTemplateForHAS(conf, httpAddr, httpsAddr, "has");
+
+        try {
+            httpServer = builder.build();
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when building http server. " + e.getMessage());
+        }
+
+        init();
+
+        try {
+            httpServer.start();
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when starting http server. " + e.getMessage());
+        }
+        int connIdx = 0;
+        if (policy.isHttpEnabled()) {
+            httpAddress = httpServer.getConnectorAddress(connIdx++);
+            conf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY,
+                NetUtils.getHostPortString(httpAddress));
+        }
+
+        if (policy.isHttpsEnabled()) {
+            httpsAddress = httpServer.getConnectorAddress(connIdx);
+            conf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY,
+                NetUtils.getHostPortString(httpsAddress));
+        }
+    }
+
+    public void setWebServerAttribute(HasServer hasServer) {
+        httpServer.setAttribute(HAS_SERVER_ATTRIBUTE_KEY, hasServer);
+    }
+
+    public static HasServer getHasServerFromContext(ServletContext context) {
+        return (HasServer) context.getAttribute(HAS_SERVER_ATTRIBUTE_KEY);
+    }
+
+    /**
+     * Get http policy.
+     */
+    public HttpConfig.Policy getHttpPolicy(HasConfig conf) {
+        String policyStr = conf.getString(WebConfigKey.HAS_HTTP_POLICY_KEY,
+            WebConfigKey.HAS_HTTP_POLICY_DEFAULT);
+        HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
+        if (policy == null) {
+            throw new HadoopIllegalArgumentException("Unregonized value '"
+                + policyStr + "' for " + WebConfigKey.HAS_HTTP_POLICY_KEY);
+        }
+
+        conf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY, policy.name());
+        return policy;
+    }
+
+    /**
+     * Return an HttpServer2.Builder that the HAS server can use to
+     * initialize its HTTP / HTTPS server.
+     */
+    public HttpServer2.Builder httpServerTemplateForHAS(
+        HasConfig conf, final InetSocketAddress httpAddr, final InetSocketAddress httpsAddr,
+        String name) throws HasException {
+        HttpConfig.Policy policy = getHttpPolicy(conf);
+
+        HttpServer2.Builder builder = new HttpServer2.Builder().setName(name);
+
+        if (policy.isHttpEnabled()) {
+            if (httpAddr.getPort() == 0) {
+                builder.setFindPort(true);
+            }
+
+            URI uri = URI.create("http://" + NetUtils.getHostPortString(httpAddr));
+            builder.addEndpoint(uri);
+            LOG.info("Starting Web-server for " + name + " at: " + uri);
+        }
+
+        if (policy.isHttpsEnabled() && httpsAddr != null) {
+            HasConfig sslConf = loadSslConfiguration(conf);
+            loadSslConfToHttpServerBuilder(builder, sslConf);
+
+            if (httpsAddr.getPort() == 0) {
+                builder.setFindPort(true);
+            }
+
+            URI uri = URI.create("https://" + NetUtils.getHostPortString(httpsAddr));
+            builder.addEndpoint(uri);
+            LOG.info("Starting Web-server for " + name + " at: " + uri);
+        }
+
+        return builder;
+    }
+
+    /**
+     * Load HTTPS-related configuration.
+     */
+    public HasConfig loadSslConfiguration(HasConfig conf) throws HasException {
+        HasConfig sslConf = new HasConfig();
+
+        String sslConfigString = conf.getString(
+            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT);
+        LOG.info("Get the ssl config file: " + sslConfigString);
+        try {
+            sslConf.addIniConfig(new File(sslConfigString));
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when adding config. " + e.getMessage());
+        }
+
+        final String[] reqSslProps = {
+            WebConfigKey.HAS_SERVER_HTTPS_TRUSTSTORE_LOCATION_KEY,
+            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_LOCATION_KEY,
+            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY,
+            WebConfigKey.HAS_SERVER_HTTPS_KEYPASSWORD_KEY
+        };
+
+        // Check if the required properties are included
+        for (String sslProp : reqSslProps) {
+            if (sslConf.getString(sslProp) == null) {
+                LOG.warn("SSL config " + sslProp + " is missing. If "
+                    + WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY
+                    + " is specified, make sure it is a relative path");
+            }
+        }
+
+        boolean requireClientAuth = conf.getBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY,
+            WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
+        sslConf.setBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY, requireClientAuth);
+        return sslConf;
+    }
+
+    public HttpServer2.Builder loadSslConfToHttpServerBuilder(HttpServer2.Builder builder,
+                                                              HasConfig sslConf) {
+        return builder
+            .needsClientAuth(
+                sslConf.getBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY,
+                    WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT))
+            .keyPassword(getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_KEYPASSWORD_KEY))
+            .keyStore(sslConf.getString("ssl.server.keystore.location"),
+                getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY),
+                sslConf.getString("ssl.server.keystore.type", "jks"))
+            .trustStore(sslConf.getString("ssl.server.truststore.location"),
+                getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY),
+                sslConf.getString("ssl.server.truststore.type", "jks"))
+            .excludeCiphers(
+                sslConf.getString("ssl.server.exclude.cipher.list"));
+    }
+
+    /**
+     * Get the password for the given alias directly from the
+     * configuration as clear text.
+     *
+     * @param conf  Configuration instance
+     * @param alias name of the credential to retrieve
+     * @return String credential value or null
+     */
+    public String getPassword(HasConfig conf, String alias) {
+        return conf.getString(alias);
+    }
+
+    public void stop() throws Exception {
+        if (httpServer != null) {
+            httpServer.stop();
+        }
+    }
+
+    public InetSocketAddress getHttpAddress() {
+        return httpAddress;
+    }
+
+    public InetSocketAddress getHttpsAddress() {
+        return httpsAddress;
+    }
+}

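Putting the pieces together, a minimal sketch of starting the web server from
code (HTTP_ONLY is chosen here only to avoid the SSL keystore settings; a
real deployment would keep the HTTPS_ONLY default):

    import org.apache.hadoop.has.common.HasConfig;
    import org.apache.hadoop.has.server.web.WebConfigKey;
    import org.apache.hadoop.has.server.web.WebServer;

    public class WebServerSketch {
        public static void main(String[] args) throws Exception {
            HasConfig conf = new HasConfig();
            conf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY, "HTTP_ONLY");
            conf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY, "0.0.0.0:9870");

            WebServer webServer = new WebServer(conf);
            webServer.start();
            System.out.println("Bound to " + webServer.getBindAddress());
            webServer.stop();
        }
    }
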
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java
new file mode 100644
index 0000000..a6fc4ce
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web.rest;
+
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.server.HasServer;
+import org.apache.hadoop.has.server.web.WebServer;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
+import org.apache.kerby.kerberos.kerb.server.KdcUtil;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Web methods for configuring the HAS server.
+ */
+@Path("/conf")
+public class ConfApi {
+
+    @Context
+    private ServletContext context;
+
+    @Context
+    private HttpServletRequest httpRequest;
+
+    /**
+     * Set HAS plugin.
+     *
+     * @param plugin HAS plugin name
+     * @return Response
+     */
+    @PUT
+    @Path("/setplugin")
+    @Consumes({MediaType.TEXT_PLAIN})
+    @Produces({MediaType.TEXT_PLAIN})
+    public Response setPlugin(@QueryParam("plugin") final String plugin) {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            WebServer.LOG.info("Set HAS plugin...");
+            try {
+                Map<String, String> values = new HashMap<>();
+                File hasConfFile = new File(hasServer.getConfDir(), "has-server.conf");
+                HasConfig hasConfig = HasUtil.getHasConfig(hasConfFile);
+                if (hasConfig != null) {
+                    String defaultValue = hasConfig.getPluginName();
+                    values.put(defaultValue, plugin);
+                } else {
+                    throw new RuntimeException("has-server.conf not found. ");
+                }
+                hasServer.updateConfFile("has-server.conf", values);
+            } catch (IOException | HasException e) {
+                throw new RuntimeException("Failed to set HAS plugin. ", e);
+            }
+            WebServer.LOG.info("HAS plugin set successfully.");
+
+            return Response.status(200).entity("HAS plugin set successfully.\n").build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Config HAS server backend.
+     *
+     * @param backendType type of backend
+     * @param dir         json dir
+     * @param driver      mysql JDBC connector driver
+     * @param url         mysql JDBC connector url
+     * @param user        mysql user name
+     * @param password    mysql password of user
+     * @return Response
+     */
+    @PUT
+    @Path("/configkdcbackend")
+    @Consumes({MediaType.APPLICATION_JSON})
+    @Produces({MediaType.TEXT_PLAIN})
+    public Response configKdcBackend(
+        @QueryParam("backendType") final String backendType,
+        @QueryParam("dir") @DefaultValue("/tmp/has/jsonbackend") final String dir,
+        @QueryParam("driver") @DefaultValue("com.mysql.jdbc.Driver") final String driver,
+        @QueryParam("url") @DefaultValue("jdbc:mysql://127.0.0.1:3306/mysqlbackend") final String url,
+        @QueryParam("user") @DefaultValue("root") final String user,
+        @QueryParam("password") @DefaultValue("passwd") final String password) {
+
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            if ("json".equals(backendType)) {
+                WebServer.LOG.info("Set Json backend...");
+                try {
+                    Map<String, String> values = new HashMap<>();
+                    values.put("_JAR_", "org.apache.kerby.kerberos.kdc.identitybackend.JsonIdentityBackend");
+                    values.put("#_JSON_DIR_", "backend.json.dir = " + dir);
+                    values.put("#_MYSQL_\n", "");
+                    hasServer.updateConfFile("backend.conf", values);
+                } catch (IOException | HasException e) {
+                    throw new RuntimeException("Failed to set Json backend. ", e);
+                }
+                WebServer.LOG.info("Json backend set successfully.");
+
+                return Response.status(200).entity("Json backend set successfully.\n").build();
+            } else if ("mysql".equals(backendType)) {
+                WebServer.LOG.info("Set MySQL backend...");
+                try {
+                    String mysqlConfig = "mysql_driver = " + driver + "\nmysql_url = " + url
+                        + "\nmysql_user = " + user + "\nmysql_password = " + password;
+                    Map<String, String> values = new HashMap<>();
+                    values.put("_JAR_", "org.apache.hadoop.has.server.kdc.MySQLIdentityBackend");
+                    values.put("#_JSON_DIR_\n", "");
+                    values.put("#_MYSQL_", mysqlConfig);
+                    hasServer.updateConfFile("backend.conf", values);
+                } catch (IOException | HasException e) {
+                    throw new RuntimeException("Failed to set MySQL backend. ", e);
+                }
+                WebServer.LOG.info("MySQL backend set successfully.");
+
+                return Response.status(200).entity("MySQL backend set successfully.\n").build();
+            } else {
+                return Response.status(400).entity(backendType + " is not supported.\n").build();
+            }
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Config HAS server KDC.
+     * @param port KDC port to set
+     * @param realm KDC realm to set
+     * @param host KDC host to set
+     * @return Response
+     */
+    @PUT
+    @Path("/configkdc")
+    @Consumes({MediaType.TEXT_PLAIN})
+    @Produces({MediaType.TEXT_PLAIN})
+    public Response configKdc(
+        @QueryParam("port") final int port,
+        @QueryParam("realm") final String realm,
+        @QueryParam("host") final String host) {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            WebServer.LOG.info("Config HAS server KDC...");
+            try {
+                BackendConfig backendConfig = KdcUtil.getBackendConfig(hasServer.getConfDir());
+                String backendJar = backendConfig.getString("kdc_identity_backend");
+                if ("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend".equals(backendJar)) {
+                    hasServer.configMySQLKdc(backendConfig, realm, port, host, hasServer);
+                } else {
+                    Map<String, String> values = new HashMap<>();
+                    values.put("_HOST_", host);
+                    values.put("_PORT_", String.valueOf(port));
+                    values.put("_REALM_", realm);
+                    hasServer.updateConfFile("kdc.conf", values);
+                    String kdc = "\t\tkdc = " + host + ":" + port;
+                    values.put("_KDCS_", kdc);
+                    values.put("_UDP_LIMIT_", "4096");
+                    hasServer.updateConfFile("krb5.conf", values);
+                }
+            } catch (IOException | HasException | KrbException e) {
+                throw new RuntimeException("Failed to config HAS KDC. ", e);
+            }
+            WebServer.LOG.info("HAS server KDC set successfully.");
+            return Response.status(200).entity("HAS server KDC set successfully.\n").build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+}

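The resources above are served under /has/v1/conf, per the path spec
registered in WebServer, and are only honored over HTTPS while enable_conf is
true. A client-side sketch of invoking the configkdc endpoint (the host,
port, and realm are assumptions, and TLS trust setup is omitted):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ConfApiClientSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical server address; the conf API rejects plain HTTP.
            URL url = new URL("https://kdc-host:9871/has/v1/conf/configkdc"
                    + "?port=88&realm=HADOOP.COM&host=kdc-host");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("PUT");
            System.out.println("HTTP " + conn.getResponseCode());
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line);
                }
            }
        }
    }
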

Re: [01/10] directory-kerby git commit: Add the HAS project to Kerby.

Posted by Colm O hEigeartaigh <co...@apache.org>.
Hi Jiajia,

What is this new branch for?

Colm.

On Wed, Nov 15, 2017 at 5:12 AM, <pl...@apache.org> wrote:

> Repository: directory-kerby
> Updated Branches:
>   refs/heads/has 1e6d36497 -> be5805660
>
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/has-tool/has-server-tool/src/main/java/
> org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
> b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
> new file mode 100644
> index 0000000..322eafd
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
> @@ -0,0 +1,61 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +public class AddPrincipalCmd extends HadminCmd {
> +
> +    public static final String USAGE = "Usage: add_principal [options]
> <principal-name>\n"
> +            + "\toptions are:\n"
> +            + "\t\t[-randkey]\n"
> +            + "\t\t[-pw password]"
> +            + "\tExample:\n"
> +            + "\t\tadd_principal -pw mypassword alice\n";
> +
> +    public AddPrincipalCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +
> +        if (items.length < 2) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        String clientPrincipal = items[items.length - 1];
> +        if (!items[1].startsWith("-")) {
> +            getHadmin().addPrincipal(clientPrincipal);
> +        } else if (items[1].startsWith("-randkey")) {
> +            getHadmin().addPrincipal(clientPrincipal);
> +        } else if (items[1].startsWith("-pw")) {
> +            String password = items[2];
> +            getHadmin().addPrincipal(clientPrincipal, password);
> +        } else {
> +            System.err.println("add_principal cmd format error.");
> +            System.err.println(USAGE);
> +            return;
> +        }
> +        System.out.println("Success to add principal :" +
> clientPrincipal);
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/has-tool/has-server-tool/src/main/java/
> org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
> b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
> new file mode 100644
> index 0000000..b38f2c7
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
> @@ -0,0 +1,78 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +import org.codehaus.jettison.json.JSONArray;
> +import org.codehaus.jettison.json.JSONObject;
> +import org.slf4j.Logger;
> +import org.slf4j.LoggerFactory;
> +
> +import java.io.BufferedReader;
> +import java.io.File;
> +import java.io.FileReader;
> +
> +public class AddPrincipalsCmd extends HadminCmd {
> +    private static final Logger LOG = LoggerFactory.getLogger(
> AddPrincipalsCmd.class);
> +
> +    private static final String USAGE = "\nUsage: create_principals
> [hostRoles-file]\n"
> +            + "\t'hostRoles-file' is a file with a hostRoles json string
> like:\n"
> +            + "\t\t{HOSTS: [ {\"name\":\"host1\",\"hostRoles\":\"HDFS\"},
> "
> +            + "{\"name\":\"host2\",\"hostRoles\":\"HDFS,HBASE\"} ] }\n"
> +            + "\tExample:\n"
> +            + "\t\tcreate_principals hostroles.txt\n";
> +
> +    public AddPrincipalsCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        if (items.length != 2) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        File hostRoles = new File(items[1]);
> +        if (!hostRoles.exists()) {
> +            throw new HasException("HostRoles file is not exists.");
> +        }
> +        try {
> +            BufferedReader reader = new BufferedReader(new
> FileReader(hostRoles));
> +            StringBuilder sb = new StringBuilder();
> +            String tempString;
> +            while ((tempString = reader.readLine()) != null) {
> +                sb.append(tempString);
> +            }
> +            JSONArray hostArray = new JSONObject(sb.toString()).
> optJSONArray("HOSTS");
> +            for (int i = 0; i < hostArray.length(); i++) {
> +                JSONObject host = (JSONObject) hostArray.get(i);
> +                String[] roles = host.getString("hostRoles").split(",");
> +                for (String role : roles) {
> +                    System.out.println(getHadmin().addPrincByRole(host.
> getString("name"),
> +                            role.toUpperCase()));
> +                }
> +            }
> +        } catch (Exception e) {
> +            throw new HasException("Failed to execute creating
> principals, because : " + e.getMessage());
> +        }
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/has-tool/has-server-tool/src/main/java/
> org/apache/hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
> b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
> new file mode 100644
> index 0000000..98458ec
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
> @@ -0,0 +1,80 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +import java.io.Console;
> +import java.util.Scanner;
> +
> +public class DeletePrincipalCmd extends HadminCmd {
> +
> +    public static final String USAGE = "Usage: delete_principal
> <principal-name>\n"
> +            + "\tExample:\n"
> +            + "\t\tdelete_principal alice\n";
> +
> +    private Boolean force = false;
> +
> +    public DeletePrincipalCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        if (items.length < 2) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +        String principal = items[items.length - 1];
> +        String reply;
> +        Console console = System.console();
> +        String prompt = "Are you sure to delete the principal? (yes/no,
> YES/NO, y/n, Y/N) ";
> +        if (console == null) {
> +            System.out.println("Couldn't get Console instance, "
> +                    + "maybe you're running this from within an IDE. "
> +                    + "Use scanner to read password.");
> +            Scanner scanner = new Scanner(System.in, "UTF-8");
> +            reply = getReply(scanner, prompt);
> +        } else {
> +            reply = getReply(console, prompt);
> +        }
> +        if (reply.equals("yes") || reply.equals("YES") ||
> reply.equals("y") || reply.equals("Y")) {
> +            getHadmin().deletePrincipal(principal);
> +            System.out.println("Success to delete " + principal);
> +        } else if (reply.equals("no") || reply.equals("NO") ||
> reply.equals("n") || reply.equals("N")) {
> +            System.out.println("Principal \"" + principal + "\"  not
> deleted.");
> +        } else {
> +            System.err.println("Unknown request, fail to delete the
> principal.");
> +            System.err.println(USAGE);
> +        }
> +    }
> +
> +    private String getReply(Scanner scanner, String prompt) {
> +        System.out.println(prompt);
> +        return scanner.nextLine().trim();
> +    }
> +
> +    private String getReply(Console console, String prompt) {
> +        console.printf(prompt);
> +        String line = console.readLine();
> +        return line;
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/has-tool/has-server-tool/src/main/java/
> org/apache/hadoop/has/tool/server/hadmin/local/cmd/
> DisableConfigureCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
> b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
> new file mode 100644
> index 0000000..66eb5cb
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
> @@ -0,0 +1,40 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +public class DisableConfigureCmd extends HadminCmd {
> +
> +    public static final String USAGE = "Usage: enable_configure\n"
> +            + "\tExample:\n"
> +            + "\t\tenable\n";
> +
> +    public DisableConfigureCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        getHadmin().setEnableOfConf("false");
> +        System.out.println("Set conf disable.");
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/has-tool/has-server-tool/src/main/java/
> org/apache/hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
> b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
> new file mode 100644
> index 0000000..f40a6c6
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
> @@ -0,0 +1,40 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +public class EnableConfigureCmd extends HadminCmd {
> +
> +    public static final String USAGE = "Usage: enable_configure\n"
> +            + "\tExample:\n"
> +            + "\t\tenable\n";
> +
> +    public EnableConfigureCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        getHadmin().setEnableOfConf("true");
> +        System.out.println("Set conf enable.");
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/has-tool/has-server-tool/src/main/java/
> org/apache/hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
> b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
> new file mode 100644
> index 0000000..c5b130c
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
> @@ -0,0 +1,57 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +import org.apache.hadoop.has.server.web.HostRoleType;
> +
> +import java.io.File;
> +
> +public class ExportKeytabsCmd extends HadminCmd {
> +    private static final String USAGE = "\nUsage: export_keytabs <host>
> [role]\n"
> +            + "\tExample:\n"
> +            + "\t\texport_keytabs host1 HDFS\n";
> +
> +    public ExportKeytabsCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        if (items.length < 2) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +        String host = items[1];
> +        if (items.length >= 3) {
> +            exportKeytab(host, items[2]);
> +            return;
> +        }
> +        for (HostRoleType r : HostRoleType.values()) {
> +            exportKeytab(host, r.getName());
> +        }
> +    }
> +
> +    public void exportKeytab(String host, String role) throws
> HasException {
> +        File keytab = new File(role + "-" + host + ".keytab");
> +        getHadmin().getKeytabByHostAndRole(host, role, keytab);
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/has-tool/has-server-tool/src/main/java/
> org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
> b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
> new file mode 100644
> index 0000000..ebaf07f
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/
> hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
> @@ -0,0 +1,36 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +public class GetHostRolesCmd extends HadminCmd {
> +    private static final String USAGE = "Usage: get_hostroles\n"
> +            + "\tExample:\n"
> +            + "\t\tget_hostroles\n";
> +
> +    public GetHostRolesCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) {
> +        getHadmin().getHostRoles();
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
> new file mode 100644
> index 0000000..88612a8
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
> @@ -0,0 +1,76 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +import org.apache.kerby.kerberos.kerb.identity.KrbIdentity;
> +import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
> +import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
> +
> +import java.util.Map;
> +
> +public class GetPrincipalCmd extends HadminCmd {
> +    private static final String USAGE = "Usage: getprinc principalName\n"
> +        + "\tExample:\n"
> +        + "\t\tgetprinc hello@TEST.COM\"\n";
> +
> +    public GetPrincipalCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) {
> +        if (items.length != 2) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        String princName = items[items.length - 1];
> +        KrbIdentity identity = null;
> +        try {
> +            identity = getHadmin().getPrincipal(princName);
> +        } catch (HasException e) {
> +            System.err.println("Failed to get principal: " + princName + ". " + e.getMessage());
> +        }
> +
> +        if (identity == null) {
> +            System.err.println(princName + " doesn't exist\n");
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        Map<EncryptionType, EncryptionKey> keys = identity.getKeys();
> +
> +        System.out.println(
> +            "Principal: " + identity.getPrincipalName() + "\n"
> +                + "Expiration date: " + identity.getExpireTime() + "\n"
> +                + "Created time: " + identity.getCreatedTime() + "\n"
> +                + "KDC flags: " + identity.getKdcFlags() + "\n"
> +                + "Key version: " + identity.getKeyVersion() + "\n"
> +                + "Number of keys: " + keys.size()
> +        );
> +
> +        for (EncryptionType keyType : keys.keySet()) {
> +            System.out.println("key: " + keyType);
> +        }
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java
> new file mode 100644
> index 0000000..95ce59f
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java
> @@ -0,0 +1,42 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +public abstract class HadminCmd {
> +
> +    private LocalHasAdmin hadmin;
> +
> +    public HadminCmd(LocalHasAdmin hadmin) {
> +        this.hadmin = hadmin;
> +    }
> +
> +    protected LocalHasAdmin getHadmin() {
> +        return hadmin;
> +    }
> +
> +    /**
> +     * Execute the hadmin cmd.
> +     * @param input Input cmd to execute
> +     */
> +    public abstract void execute(String[] input) throws HasException;
> +}
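> 
> HadminCmd above is the extension point of the local hadmin shell: every
> command wraps a LocalHasAdmin instance and implements execute() over the
> tokenized input, with items[0] being the command name. A minimal sketch of
> what a further command could look like, assuming a hypothetical
> deletePrincipal() method on LocalHasAdmin (not shown in this commit):
> 
> ```
> package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> 
> import org.apache.hadoop.has.common.HasException;
> import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> 
> public class DeletePrincipalCmd extends HadminCmd {
>     private static final String USAGE = "Usage: delete_principal <principal-name>\n";
> 
>     public DeletePrincipalCmd(LocalHasAdmin hadmin) {
>         super(hadmin);
>     }
> 
>     @Override
>     public void execute(String[] items) throws HasException {
>         if (items.length < 2) {
>             System.err.println(USAGE);
>             return;
>         }
>         // items[0] is "delete_principal"; items[1] is the principal name.
>         getHadmin().deletePrincipal(items[1]);
>         System.out.println("Principal \"" + items[1] + "\" deleted.");
>     }
> }
> ```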
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
> new file mode 100644
> index 0000000..99e05e2
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
> @@ -0,0 +1,91 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +import java.io.File;
> +import java.util.List;
> +
> +public class KeytabAddCmd extends HadminCmd {
> +    private static final String USAGE =
> +        "Usage: ktadd [-k[eytab] keytab] [-q] [-e keysaltlist]
> [-norandkey] [principal | -glob princ-exp] [...]";
> +
> +    private static final String DEFAULT_KEYTAB_FILE_LOCATION =
> "/etc/krb5.keytab";
> +
> +    public KeytabAddCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) {
> +
> +        String principal = null;
> +        String keytabFileLocation = null;
> +        boolean glob = false;
> +
> +        // Since items[0] is the command name "ktadd", parsing starts at index 1.
> +        int index = 1;
> +        while (index < items.length) {
> +            String command = items[index];
> +            if (command.equals("-k")) {
> +                index++;
> +                if (index >= items.length) {
> +                    System.err.println(USAGE);
> +                    return;
> +                }
> +                keytabFileLocation = items[index].trim();
> +
> +            } else if (command.equals("-glob")) {
> +                glob = true;
> +            } else if (!command.startsWith("-")) {
> +                principal = command;
> +            }
> +            index++;
> +        }
> +
> +        if (keytabFileLocation == null) {
> +            keytabFileLocation = DEFAULT_KEYTAB_FILE_LOCATION;
> +        }
> +        File keytabFile = new File(keytabFileLocation);
> +
> +        if (principal == null) {
> +            System.out.println((glob ? "princ-exp" : "principal") + " not
> specified!");
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        try {
> +            if (glob) {
> +                List<String> principals = getHadmin().getPrincipals(principal);
> +                if (!principals.isEmpty()) {
> +                    getHadmin().exportKeytab(keytabFile, principals);
> +                }
> +            } else {
> +                getHadmin().exportKeytab(keytabFile, principal);
> +            }
> +            System.out.println("Principal export to keytab file : " +
> keytabFile + " successful .");
> +        } catch (HasException e) {
> +            System.err.println("Principal \"" + principal + "\" fail to
> add entry to keytab."
> +                    + e.getMessage());
> +        }
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
> new file mode 100644
> index 0000000..ef9e7f7
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
> @@ -0,0 +1,63 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +import java.util.List;
> +
> +public class ListPrincipalsCmd extends HadminCmd {
> +    private static final String USAGE = "Usage: list_principals
> [expression]\n"
> +            + "\t'expression' is a shell-style glob expression that can
> contain the wild-card characters ?, *, and []."
> +            + "\tExample:\n"
> +            + "\t\tlist_principals [expression]\n";
> +
> +    public ListPrincipalsCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        if (items.length > 2) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        List<String> principalLists = null;
> +
> +        if (items.length == 1) {
> +            principalLists = getHadmin().getPrincipals();
> +        } else {
> +            //have expression
> +            String exp = items[1];
> +            principalLists = getHadmin().getPrincipals(exp);
> +        }
> +
> +        if (principalLists.size() == 0 || (principalLists.size() == 1 && principalLists.get(0).isEmpty())) {
> +            return;
> +        } else {
> +            System.out.println("Principals:");
> +            for (int i = 0; i < principalLists.size(); i++) {
> +                System.out.println(principalLists.get(i));
> +            }
> +        }
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
> new file mode 100644
> index 0000000..2c0ba20
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
> @@ -0,0 +1,82 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +import java.io.Console;
> +import java.util.Scanner;
> +
> +public class RenamePrincipalCmd extends HadminCmd {
> +    public static final String USAGE = "Usage: rename_principal
> <old_principal_name>"
> +            + " <new_principal_name>\n"
> +            + "\tExample:\n"
> +            + "\t\trename_principal alice bob\n";
> +
> +    public RenamePrincipalCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        if (items.length < 3) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        String oldPrincipalName = items[items.length - 2];
> +        String newPrincipalName = items[items.length - 1];
> +
> +        String reply;
> +        Console console = System.console();
> +        String prompt = "Are you sure to rename the principal? (yes/no,
> YES/NO, y/n, Y/N) ";
> +        if (console == null) {
> +            System.out.println("Couldn't get Console instance, "
> +                    + "maybe you're running this from within an IDE. "
> +                    + "Use scanner to read password.");
> +            Scanner scanner = new Scanner(System.in, "UTF-8");
> +            reply = getReply(scanner, prompt);
> +        } else {
> +            reply = getReply(console, prompt);
> +        }
> +        if (reply.equals("yes") || reply.equals("YES") ||
> reply.equals("y") || reply.equals("Y")) {
> +            getHadmin().renamePrincipal(oldPrincipalName,
> newPrincipalName);
> +            System.out.println("Success to rename principal : \"" +
> oldPrincipalName
> +                + "\" to \"" + newPrincipalName + "\".");
> +        } else if (reply.equals("no") || reply.equals("NO") ||
> reply.equals("n") || reply.equals("N")) {
> +            System.out.println("Principal \"" + oldPrincipalName + "\"
> not renamed.");
> +        } else {
> +            System.err.println("Unknown request, fail to rename the
> principal.");
> +            System.err.println(USAGE);
> +        }
> +    }
> +
> +    private String getReply(Scanner scanner, String prompt) {
> +        System.out.println(prompt);
> +        return scanner.nextLine().trim();
> +    }
> +
> +    private String getReply(Console console, String prompt) {
> +        console.printf(prompt);
> +        return console.readLine();
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/pom.xml
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/pom.xml b/has/has-tool/pom.xml
> new file mode 100644
> index 0000000..a43041a
> --- /dev/null
> +++ b/has/has-tool/pom.xml
> @@ -0,0 +1,23 @@
> +<?xml version="1.0" encoding="UTF-8"?>
> +<project xmlns="http://maven.apache.org/POM/4.0.0"
> +         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
> +         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
> http://maven.apache.org/xsd/maven-4.0.0.xsd">
> +
> +  <parent>
> +    <groupId>org.apache.hadoop</groupId>
> +    <artifactId>has-project</artifactId>
> +    <version>1.0.0-SNAPSHOT</version>
> +  </parent>
> +
> +  <modelVersion>4.0.0</modelVersion>
> +  <artifactId>has-tool</artifactId>
> +  <packaging>pom</packaging>
> +  <description>HAS tool</description>
> +  <name>HAS tool</name>
> +
> +  <modules>
> +    <module>has-client-tool</module>
> +    <module>has-server-tool</module>
> +  </modules>
> +
> +</project>
> \ No newline at end of file
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/pom.xml
> ----------------------------------------------------------------------
> diff --git a/has/pom.xml b/has/pom.xml
> new file mode 100644
> index 0000000..ad80711
> --- /dev/null
> +++ b/has/pom.xml
> @@ -0,0 +1,128 @@
> +<?xml version="1.0" encoding="UTF-8"?>
> +<!--
> +  Licensed under the Apache License, Version 2.0 (the "License");
> +  you may not use this file except in compliance with the License.
> +  You may obtain a copy of the License at
> +
> +    http://www.apache.org/licenses/LICENSE-2.0
> +
> +  Unless required by applicable law or agreed to in writing, software
> +  distributed under the License is distributed on an "AS IS" BASIS,
> +  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
> +  See the License for the specific language governing permissions and
> +  limitations under the License. See accompanying LICENSE file.
> +-->
> +
> +<project xmlns="http://maven.apache.org/POM/4.0.0"
> +         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
> +         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
> http://maven.apache.org/xsd/maven-4.0.0.xsd">
> +
> +  <parent>
> +    <groupId>org.apache</groupId>
> +    <artifactId>apache</artifactId>
> +    <version>18</version>
> +    <relativePath/>
> +  </parent>
> +
> +  <modelVersion>4.0.0</modelVersion>
> +  <groupId>org.apache.hadoop</groupId>
> +  <artifactId>has-project</artifactId>
> +  <version>1.0.0-SNAPSHOT</version>
> +  <description>Hadoop Authentication Server</description>
> +  <name>Hadoop Authentication Server</name>
> +  <packaging>pom</packaging>
> +
> +  <modules>
> +    <module>has-common</module>
> +    <module>has-plugins</module>
> +    <module>has-server</module>
> +    <module>has-client</module>
> +    <module>has-dist</module>
> +    <module>has-tool</module>
> +  </modules>
> +
> +  <properties>
> +    <commons-codec.version>1.4</commons-codec.version>
> +    <kerby.version>1.1.0-SNAPSHOT</kerby.version>
> +    <slf4j.version>1.7.25</slf4j.version>
> +    <buildtools.dir>${basedir}/build-tools</buildtools.dir>
> +  </properties>
> +
> +  <build>
> +    <plugins>
> +      <plugin>
> +        <groupId>org.apache.maven.plugins</groupId>
> +        <artifactId>maven-compiler-plugin</artifactId>
> +        <configuration>
> +          <source>1.8</source>
> +          <target>1.8</target>
> +        </configuration>
> +      </plugin>
> +
> +      <plugin>
> +        <groupId>org.apache.maven.plugins</groupId>
> +        <artifactId>maven-checkstyle-plugin</artifactId>
> +        <version>2.17</version>
> +        <configuration>
> +          <configLocation>${buildtools.dir}/has-checkstyle.xml</configLocation>
> +          <includeTestSourceDirectory>true</includeTestSourceDirectory>
> +          <encoding>UTF-8</encoding>
> +          <failOnViolation>true</failOnViolation>
> +        </configuration>
> +        <executions>
> +          <execution>
> +            <id>validate</id>
> +            <phase>validate</phase>
> +            <goals>
> +              <goal>check</goal>
> +            </goals>
> +          </execution>
> +        </executions>
> +      </plugin>
> +
> +      <plugin>
> +        <groupId>org.apache.maven.plugins</groupId>
> +        <artifactId>maven-surefire-plugin</artifactId>
> +        <version>2.17</version>
> +        <configuration>
> +          <runOrder>alphabetical</runOrder>
> +        </configuration>
> +      </plugin>
> +    </plugins>
> +  </build>
> +
> +  <profiles>
> +    <profile>
> +      <id>nochecks</id>
> +      <properties>
> +        <pmd.skip>true</pmd.skip>
> +        <checkstyle.skip>true</checkstyle.skip>
> +      </properties>
> +    </profile>
> +    <profile>
> +      <id>activate-buildtools-in-module</id>
> +      <activation>
> +        <file>
> +          <exists>${basedir}/../build-tools/has-checkstyle.xml</exists>
> +        </file>
> +      </activation>
> +      <properties>
> +        <buildtools.dir>${basedir}/../build-tools</buildtools.dir>
> +      </properties>
> +    </profile>
> +    <profile>
> +      <id>activate-buildtools-in-submodule</id>
> +      <activation>
> +        <file>
> +          <exists>${basedir}/../../build-tools/has-checkstyle.xml</exists>
> +        </file>
> +      </activation>
> +      <properties>
> +        <buildtools.dir>${basedir}/../../build-tools</buildtools.dir>
> +      </properties>
> +    </profile>
> +  </profiles>
> +
> +</project>
> +
> +
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/hadoop/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/hadoop/README.md b/has/supports/hadoop/README.md
> new file mode 100644
> index 0000000..15f177c
> --- /dev/null
> +++ b/has/supports/hadoop/README.md
> @@ -0,0 +1,339 @@
> +Enable Hadoop
> +================
> +
> +## 1. Build Hadoop
> +
> +### Apply the patch to hadoop-2.7.2 source code
> +```
> +git apply hadoop-2.7.2.patch
> +```
> +
> +### Build
> +```
> +mvn package -Pdist,native -Dtar -DskipTests -Dmaven.javadoc.skip=true -Dcontainer-executor.conf.dir=/etc/hadoop/conf
> +```
> +
> +### Redeploy Hadoop
> +
> +## 2. Distribute and configure keytab files
> +
> +### Create keytab and deploy krb5.conf and has-client.conf
> +Please look at [How to start HAS](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/has-start.md) for details.
> +
> +### Distribute keytab files to the corresponding nodes.
> +
> +### Set permissions on keytab files
> +```
> +# Keytab files should be read-only
> +chmod 400 *.keytab
> +```
> +
> +## 3. Update Hadoop configuration files
> +
> +### Update core-site.xml
> +Add the following properties:
> +```
> +<property>
> +  <name>hadoop.security.authorization</name>
> +  <value>true</value>
> +</property>
> +<property>
> +  <name>hadoop.security.authentication</name>
> +  <value>kerberos</value>
> +</property>
> +<property>
> +  <name>hadoop.security.authentication.use.has</name>
> +  <value>true</value>
> +</property>
> +```
> +
> +### Update hdfs-site.xml
> +Add the following properties:
> +```
> +<!-- General HDFS security config -->
> +<property>
> +  <name>dfs.block.access.token.enable</name>
> +  <value>true</value>
> +</property>
> +
> +<!-- NameNode security config -->
> +<property>
> +  <name>dfs.namenode.keytab.file</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +</property>
> +<property>
> +  <name>dfs.namenode.kerberos.principal</name>
> +  <value>hdfs/_HOST@HADOOP.COM</value>
> +</property>
> +<property>
> +  <name>dfs.namenode.kerberos.internal.spnego.principal</name>
> +  <value>HTTP/_HOST@HADOOP.COM</value>
> +</property>
> +<property>
> +  <name>dfs.namenode.delegation.token.max-lifetime</name>
> +  <value>604800000</value>
> +  <description>The maximum lifetime in milliseconds for which a delegation token is valid.</description>
> +</property>
> +
> +<!-- Secondary NameNode security config -->
> +<property>
> +  <name>dfs.secondary.namenode.keytab.file</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +</property>
> +<property>
> +  <name>dfs.secondary.namenode.kerberos.principal</name>
> +  <value>hdfs/_HOST@HADOOP.COM</value>
> +</property>
> +<property>
> +  <name>dfs.secondary.namenode.kerberos.internal.spnego.principal</name>
> +  <value>HTTP/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<!-- DataNode security config -->
> +<property>
> +  <name>dfs.datanode.data.dir.perm</name>
> +  <value>700</value>
> +</property>
> +<property>
> +  <name>dfs.datanode.keytab.file</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +</property>
> +<property>
> +  <name>dfs.datanode.kerberos.principal</name>
> +  <value>hdfs/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<!-- HTTPS config -->
> +<property>
> +  <name>dfs.http.policy</name>
> +  <value>HTTPS_ONLY</value>
> +</property>
> +<property>
> +  <name>dfs.data.transfer.protection</name>
> +  <value>integrity</value>
> +</property>
> +<property>
> +  <name>dfs.web.authentication.kerberos.keytab</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +</property>
> +<property>
> +  <name>dfs.web.authentication.kerberos.principal</name>
> +  <value>HTTP/_HOST@HADOOP.COM</value>
> +</property>
> +```
> +
> +### Configuration for HDFS HA
> +
> +> For normal configuration, please look at [HDFS High Availability](https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-hdfs/HDFSHighAvailabilityWithNFS.html)
> +
> +Add the following properties in hdfs-site.xml:
> +```
> +<property>
> +  <name>dfs.journalnode.keytab.file</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +</property>
> +<property>
> +  <name>dfs.journalnode.kerberos.principal</name>
> +  <value>hdfs/_HOST@HADOOP.COM</value>
> +</property>
> +<property>
> +  <name>dfs.journalnode.kerberos.internal.spnego.principal</name>
> +  <value>HTTP/_HOST@HADOOP.COM</value>
> +</property>
> +```
> +
> +### Update yarn-site.xml
> +Add the following properties:
> +```
> +<!-- ResourceManager security config -->
> +<property>
> +  <name>yarn.resourcemanager.keytab</name>
> +  <value>/etc/hadoop/conf/yarn.keytab</value>
> +</property>
> +<property>
> +  <name>yarn.resourcemanager.principal</name>
> +  <value>yarn/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<!-- NodeManager security config -->
> +<property>
> +  <name>yarn.nodemanager.keytab</name>
> +  <value>/etc/hadoop/conf/yarn.keytab</value>
> +</property>
> +<property>
> +  <name>yarn.nodemanager.principal</name>
> +  <value>yarn/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<!-- HTTPS config -->
> +<property>
> +  <name>mapreduce.jobhistory.http.policy</name>
> +  <value>HTTPS_ONLY</value>
> +</property>
> +
> +<!-- Container executor config -->
> +<property>
> +  <name>yarn.nodemanager.container-executor.class</name>
> +  <value>org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor</value>
> +</property>
> +<property>
> +  <name>yarn.nodemanager.linux-container-executor.group</name>
> +  <value>root</value>
> +</property>
> +
> +<!-- Timeline service config, if timeline service enabled -->
> +<property>
> +  <name>yarn.timeline-service.principal</name>
> +  <value>yarn/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<property>
> +  <name>yarn.timeline-service.keytab</name>
> +  <value>/etc/hadoop/conf/yarn.keytab</value>
> +</property>
> +
> +<property>
> +  <name>yarn.timeline-service.http-authentication.type</name>
> +  <value>kerberos</value>
> +</property>
> +
> +<property>
> +  <name>yarn.timeline-service.http-authentication.kerberos.principal</name>
> +  <value>HTTP/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<property>
> +  <name>yarn.timeline-service.http-authentication.kerberos.keytab</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +</property>
> +
> +<!-- Proxy server config, if web proxy server enabled -->
> +<property>
> +  <name>yarn.web-proxy.keytab</name>
> +  <value>/etc/hadoop/conf/yarn.keytab</value>
> +</property>
> +
> +<property>
> +  <name>yarn.web-proxy.principal</name>
> +  <value>yarn/_HOST@HADOOP.COM</value>
> +</property>
> +```
> +
> +### Update mapred-site.xml
> +Add the following properties:
> +```
> +<!-- MapReduce security config -->
> +<property>
> +  <name>mapreduce.jobhistory.keytab</name>
> +  <value>/etc/hadoop/conf/mapred.keytab</value>
> +</property>
> +<property>
> +  <name>mapreduce.jobhistory.principal</name>
> +  <value>mapred/_HOST@HADOOP.COM</value>
> +</property>
> +```
> +
> +### Create and configure ssl-server.xml
> +```
> +cd $HADOOP_HOME
> +cp etc/hadoop/ssl-server.xml.example etc/hadoop/ssl-server.xml
> +```
> +
> +Configure ssl-server.xml:
> +Please look at [How to deploy https](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/deploy-https.md).
> +
> +## 4. Configure container-executor
> +
> +### Create and configure container-executor.cfg
> +
> +Example of container-executor.cfg:
> +```
> +#configured value of yarn.nodemanager.linux-container-executor.group
> +yarn.nodemanager.linux-container-executor.group=root
> +#comma separated list of users who can not run applications
> +banned.users=bin
> +#Prevent other super-users
> +min.user.id=0
> +#comma separated list of system users who CAN run applications
> +allowed.system.users=root,nobody,impala,hive,hdfs,yarn
> +```
> +
> +Set permissions:
> +```
> +mv container-executor.cfg /etc/hadoop/conf
> +# container-executor.cfg should be read-only
> +chmod 400 container-executor.cfg
> +```
> +
> +### Set permissions on container-executor:
> +```
> +chmod 6050 container-executor
> +# Test whether the configuration is correct
> +container-executor --checksetup
> +```
> +
> +## 5. Setting up cross-realm for distcp
> +
> +### Setup cross realm trust between realms
> +Please look at [How to setup cross-realm](https://github.com/intel-bigdata/has/blob/cross-realm/doc/cross-realm.md).
> +
> +### Update core-site.xml
> +
> +Set the hadoop.security.auth_to_local parameter in both clusters by adding the following properties:
> +```
> +<!-- Set up cross realm between A.HADOOP.COM and B.HADOOP.COM -->
> +<property>
> +    <name>hadoop.security.auth_to_local</name>
> +    <value>
> +        RULE:[1:$1@$0](.*@A.HADOOP.COM)s/@A.HADOOP.COM///L
> +        RULE:[2:$1@$0](.*@A.HADOOP.COM)s/@A.HADOOP.COM///L
> +        RULE:[1:$1@$0](.*@B.HADOOP.COM)s/@B.HADOOP.COM///L
> +        RULE:[2:$1@$0](.*@B.HADOOP.COM)s/@B.HADOOP.COM///L
> +    </value>
> +</property>
> +```
> +
> +For detailed mapping rules, please look at [Mapping Rule](https://www.cloudera.com/documentation/enterprise/5-9-x/topics/cdh_sg_kerbprin_to_sn.html).
> +
> +Test the mapping:
> +```
> +hadoop org.apache.hadoop.security.HadoopKerberosName hdfs/localhost@A.HADOOP.COM
> +```
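> +
> +The same rules can be exercised programmatically through
> +org.apache.hadoop.security.HadoopKerberosName, the class the command above
> +invokes; a minimal sketch (principal and expected output are examples):
> +
> +```
> +import org.apache.hadoop.conf.Configuration;
> +import org.apache.hadoop.security.HadoopKerberosName;
> +
> +public class MappingCheck {
> +    public static void main(String[] args) throws Exception {
> +        // Reads hadoop.security.auth_to_local from core-site.xml on the classpath.
> +        HadoopKerberosName.setConfiguration(new Configuration());
> +        HadoopKerberosName name = new HadoopKerberosName("hdfs/localhost@A.HADOOP.COM");
> +        System.out.println(name.getShortName()); // "hdfs" after the /L lower-casing
> +    }
> +}
> +```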
> +
> +### Update hdfs-site.xml
> +Add the following properties on the client side:
> +```
> +<!-- Control allowed realms to authenticate with -->
> +<property>
> +    <name>dfs.namenode.kerberos.principal.pattern</name>
> +    <value>*</value>
> +</property>
> +```
> +
> +### Validate
> +Verify that the trust is set up by running HDFS commands from A.HADOOP.COM against B.HADOOP.COM. Run the following command on a node of the A.HADOOP.COM cluster:
> +```
> +hdfs dfs -ls hdfs://<NameNode_FQDN_for_B.HADOOP.COM_Cluster>:8020/
> +```
> +
> +### Distcp between secure clusters
> +
> +Run the distcp command:
> +```
> +hadoop distcp hdfs://<Cluster_A_URI> hdfs://<Cluster_B_URI>
> +```
> +
> +### Distcp between secure and insecure clusters
> +
> +Add the following properties in core-site.xml:
> +```
> +<property>
> +  <name>ipc.client.fallback-to-simple-auth-allowed</name>
> +  <value>true</value>
> +</property>
> +```
> +
> +Or run the distcp command with security setting:
> +```
> +hadoop distcp -D ipc.client.fallback-to-simple-auth-allowed=true hdfs://<Cluster_A_URI> hdfs://<Cluster_B_URI>
> +```
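> +
> +The same copy can also be driven from Java through the DistCp API; a
> +minimal sketch, assuming the hadoop-distcp jar on the classpath and the
> +URI placeholders filled in:
> +
> +```
> +import java.util.Collections;
> +
> +import org.apache.hadoop.conf.Configuration;
> +import org.apache.hadoop.fs.Path;
> +import org.apache.hadoop.tools.DistCp;
> +import org.apache.hadoop.tools.DistCpOptions;
> +
> +public class CrossClusterCopy {
> +    public static void main(String[] args) throws Exception {
> +        Configuration conf = new Configuration();
> +        // Equivalent of -D ipc.client.fallback-to-simple-auth-allowed=true.
> +        conf.setBoolean("ipc.client.fallback-to-simple-auth-allowed", true);
> +        DistCpOptions options = new DistCpOptions(
> +                Collections.singletonList(new Path("hdfs://<Cluster_A_URI>")),
> +                new Path("hdfs://<Cluster_B_URI>"));
> +        new DistCp(conf, options).execute(); // submits and waits for the copy job
> +    }
> +}
> +```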
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/hadoop/hadoop-2.7.2.patch
> ----------------------------------------------------------------------
> diff --git a/has/supports/hadoop/hadoop-2.7.2.patch b/has/supports/hadoop/hadoop-2.7.2.patch
> new file mode 100644
> index 0000000..336a83d
> --- /dev/null
> +++ b/has/supports/hadoop/hadoop-2.7.2.patch
> @@ -0,0 +1,152 @@
> +diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/hadoop-auth/pom.xml
> +index aa3c2c7..e4f1fd2 100644
> +--- a/hadoop-common-project/hadoop-auth/pom.xml
> ++++ b/hadoop-common-project/hadoop-auth/pom.xml
> +@@ -143,6 +143,11 @@
> +       <artifactId>curator-test</artifactId>
> +       <scope>test</scope>
> +     </dependency>
> ++    <dependency>
> ++      <groupId>org.apache.hadoop</groupId>
> ++      <artifactId>has-client</artifactId>
> ++     <version>1.0.0-SNAPSHOT</version>
> ++    </dependency>
> +   </dependencies>
> +
> +   <build>
> +diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
> +index f7f5f63..80b7aca 100644
> +--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
> ++++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
> +@@ -44,7 +44,8 @@
> +   public static String getKrb5LoginModuleName() {
> +     return System.getProperty("java.vendor").contains("IBM")
> +       ? "com.ibm.security.auth.module.Krb5LoginModule"
> +-      : "com.sun.security.auth.module.Krb5LoginModule";
> ++//      : "com.sun.security.auth.module.Krb5LoginModule";
> ++      :"org.apache.hadoop.has.client.HasLoginModule";
> +   }
> +
> +   public static Oid getOidInstance(String oidName)
> +diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
> +index 65e4166..f5224bb 100644
> +--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
> ++++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
> +@@ -89,6 +89,8 @@
> +   private static boolean shouldRenewImmediatelyForTests = false;
> +   static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
> +   static final String HADOOP_PROXY_USER = "HADOOP_PROXY_USER";
> ++  public static final String HADOOP_SECURITY_AUTHENTICATION_USE_HAS
> ++    = "hadoop.security.authentication.use.has";
> +
> +   /**
> +    * For the purposes of unit tests, we want to test login
> +@@ -460,6 +462,9 @@ public String toString() {
> +       "hadoop-user-kerberos";
> +     private static final String KEYTAB_KERBEROS_CONFIG_NAME =
> +       "hadoop-keytab-kerberos";
> ++    private static final String HAS_KERBEROS_CONFIG_NAME =
> ++      "hadoop-has-kerberos";
> ++
> +
> +     private static final Map<String, String> BASIC_JAAS_OPTIONS =
> +       new HashMap<String,String>();
> +@@ -516,6 +521,29 @@ public String toString() {
> +       KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
> +       KEYTAB_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
> +     }
> ++
> ++    private static final Map<String, String> HAS_KERBEROS_OPTIONS =
> ++        new HashMap<String, String>();
> ++
> ++    static {
> ++      if (IBM_JAVA) {
> ++        HAS_KERBEROS_OPTIONS.put("useDefaultCcache", "true");
> ++      } else {
> ++        HAS_KERBEROS_OPTIONS.put("doNotPrompt", "true");
> ++        HAS_KERBEROS_OPTIONS.put("useTgtTicket", "true");
> ++        HAS_KERBEROS_OPTIONS.put("hadoopSecurityHas",
> conf.get("hadoop.security.has"));
> ++      }
> ++      HAS_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
> ++    }
> ++
> ++    private static final AppConfigurationEntry HAS_KERBEROS_LOGIN =
> ++      new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
> ++                                LoginModuleControlFlag.OPTIONAL,
> ++                                HAS_KERBEROS_OPTIONS);
> ++    private static final AppConfigurationEntry[] HAS_KERBEROS_CONF =
> ++      new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, HAS_KERBEROS_LOGIN,
> ++                                  HADOOP_LOGIN};
> ++
> +     private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN =
> +       new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
> +                                 LoginModuleControlFlag.REQUIRED,
> +@@ -546,6 +574,8 @@ public String toString() {
> +         }
> +         KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
> +         return KEYTAB_KERBEROS_CONF;
> ++      } else if(HAS_KERBEROS_CONFIG_NAME.equals(appName)) {
> ++        return HAS_KERBEROS_CONF;
> +       }
> +       return null;
> +     }
> +@@ -792,9 +822,16 @@ static void loginUserFromSubject(Subject subject) throws IOException {
> +       if (subject == null) {
> +         subject = new Subject();
> +       }
> +-      LoginContext login =
> +-          newLoginContext(authenticationMethod.getLoginAppName(),
> +-                          subject, new HadoopConfiguration());
> ++      LoginContext login = null;
> ++      if (authenticationMethod.equals(AuthenticationMethod.KERBEROS)
> ++        && conf.getBoolean(HADOOP_SECURITY_AUTHENTICATION_USE_HAS, false)) {
> ++        login = newLoginContext(HadoopConfiguration.HAS_KERBEROS_CONFIG_NAME,
> ++          subject, new HadoopConfiguration());
> ++      } else {
> ++        login = newLoginContext(authenticationMethod.getLoginAppName(),
> ++          subject, new HadoopConfiguration());
> ++      }
> ++
> +       login.login();
> +       UserGroupInformation realUser = new UserGroupInformation(subject);
> +       realUser.setLogin(login);
> +@@ -925,6 +962,39 @@ public void run() {
> +       }
> +     }
> +   }
> ++
> ++  /**
> ++   * Log a user in from a tgt ticket.
> ++   * @throws IOException
> ++   */
> ++  @InterfaceAudience.Public
> ++  @InterfaceStability.Evolving
> ++  public synchronized
> ++  static void loginUserFromHas() throws IOException {
> ++    if (!isSecurityEnabled())
> ++      return;
> ++
> ++    Subject subject = new Subject();
> ++    LoginContext login;
> ++    long start = 0;
> ++    try {
> ++      login = newLoginContext(HadoopConfiguration.HAS_KERBEROS_CONFIG_NAME,
> ++            subject, new HadoopConfiguration());
> ++      start = Time.now();
> ++      login.login();
> ++      metrics.loginSuccess.add(Time.now() - start);
> ++      loginUser = new UserGroupInformation(subject);
> ++      loginUser.setLogin(login);
> ++      loginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
> ++    } catch (LoginException le) {
> ++      if (start > 0) {
> ++        metrics.loginFailure.add(Time.now() - start);
> ++      }
> ++      throw new IOException("Login failure for " + le, le);
> ++    }
> ++    LOG.info("Login successful for user " + loginUser.getUserName());
> ++  }
> ++
> +   /**
> +    * Log a user in from a keytab file. Loads a user identity from a keytab
> +    * file and logs them in. They become the currently logged-in user.
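> 
> With this patch in place, a client switches to the HAS login path through
> configuration plus the new UserGroupInformation entry point. A minimal
> sketch, assuming core-site.xml already carries
> hadoop.security.authentication=kerberos and
> hadoop.security.authentication.use.has=true as configured above:
> 
> ```
> import org.apache.hadoop.conf.Configuration;
> import org.apache.hadoop.security.UserGroupInformation;
> 
> public class HasLoginExample {
>     public static void main(String[] args) throws Exception {
>         Configuration conf = new Configuration();
>         UserGroupInformation.setConfiguration(conf);
>         // Added by the patch: logs in from a TGT obtained via the HAS client.
>         UserGroupInformation.loginUserFromHas();
>         System.out.println("Logged in as "
>                 + UserGroupInformation.getLoginUser().getUserName());
>     }
> }
> ```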
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/hbase/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/hbase/README.md b/has/supports/hbase/README.md
> new file mode 100644
> index 0000000..d55a35c
> --- /dev/null
> +++ b/has/supports/hbase/README.md
> @@ -0,0 +1,154 @@
> +Enable HBase
> +===============
> +
> +## 1. Apply the patch to hadoop-2.5.1 source code
> +```
> +git apply hbase-1.1.10-hadoop-2.5.1.patch
> +```
> +
> +## 2. Build
> +```
> +mvn clean package -DskipTests
> +```
> +
> +## 3. Copy the hadoop-auth and hadoop-common jars to the HBase lib directory
> +```
> +cp hadoop/hadoop-common-project/hadoop-auth/target/hadoop-auth-2.5.1.jar $HBASE_HOME/lib/
> +cp hadoop/hadoop-common-project/hadoop-common/target/hadoop-common-2.5.1.jar $HBASE_HOME/lib/
> +```
> +
> +## 4. Update HBase security configuration
> +
> +### Update conf/hbase-site.xml
> +```
> +<property>
> +  <name>hbase.security.authentication</name>
> +  <value>kerberos</value>
> +</property>
> +
> +<property>
> +  <name>hbase.rpc.engine</name>
> +  <value>org.apache.hadoop.hbase.ipc.SecureRpcEngine</value>
> +</property>
> +
> +<property>
> +  <name>hbase.regionserver.kerberos.principal</name>
> +  <value>hbase/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<property>
> +  <name>hbase.regionserver.keytab.file</name>
> +  <value>/path/to/hbase.keytab</value>
> +</property>
> +
> +<property>
> +  <name>hbase.master.kerberos.principal</name>
> +  <value>hbase/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<property>
> +  <name>hbase.master.keytab.file</name>
> +  <value>/path/to/hbase.keytab</value>
> +</property>
> +```
> +
> +### Update /etc/hbase/conf/zk-jaas.conf
> +```
> +Client {
> +      com.sun.security.auth.module.Krb5LoginModule required
> +      useKeyTab=true
> +      keyTab="/path/to/hbase.keytab"
> +      storeKey=true
> +      useTicketCache=false
> +      principal="hbase/_HOST@HADOOP.COM";
> +};
> +```
> +
> +> Note "_HOST" should be replaced with the specific hostname.
> +
> +### Update conf/hbase-env.sh
> +```
> +export HBASE_OPTS="$HBASE_OPTS -Djava.security.auth.login.
> config=/etc/hbase/conf/zk-jaas.conf"
> +export HBASE_MANAGES_ZK=false
> +```
> +
> +### Update conf/hbase-site.xml on each HBase server host
> +```
> +<configuration>
> +  <property>
> +    <name>hbase.zookeeper.quorum</name>
> +    <value>$ZK_NODES</value>
> +  </property>
> +
> +  <property>
> +    <name>hbase.cluster.distributed</name>
> +    <value>true</value>
> +  </property>
> +</configuration>
> +```
> +
> +## 5. Update hadoop configuration to support JSVC instead of SASL
> +
> +### Install jsvc on each host of the Hadoop cluster
> +```
> +sudo apt-get install jsvc
> +```
> +
> +> Download commons-daemon-xxx.jar from http://archive.apache.org/dist/commons/daemon/binaries/
> +
> +```
> +export CLASSPATH=$CLASSPATH:/path/to/commons-daemon-xxx.jar
> +```
> +
> +### Update hadoop/etc/hadoop/hadoop-env.sh
> +```
> +export HADOOP_SECURE_DN_USER=root
> +export HADOOP_SECURE_DN_PID_DIR=$HADOOP_HOME/$DN_USER/pids
> +export HADOOP_SECURE_DN_LOG_DIR=$HADOOP_HOME/$DN_USER/logs
> +
> +export JSVC_HOME=/usr/bin
> +```
> +
> +### Disable HTTPS in hadoop/etc/hadoop/hdfs-site.xml
> +
> +***REMOVE*** the following configurations:
> +```
> +<!-- HTTPS config -->
> +<property>
> +  <name>dfs.http.policy</name>
> +  <value>HTTPS_ONLY</value>
> +</property>
> +<property>
> +  <name>dfs.data.transfer.protection</name>
> +  <value>integrity</value>
> +</property>
> +```
> +
> +### Update hadoop/etc/hadoop/hdfs-site.xml
> +```
> +<property>
> +    <name>dfs.datanode.address</name>
> +    <value>0.0.0.0:1004</value>
> +</property>
> +<property>
> +    <name>dfs.datanode.http.address</name>
> +    <value>0.0.0.0:1006</value>
> +</property>
> +```
> +
> +> The DataNode ports must be privileged ports, i.e. in the range 0-1023.
> +
> +## 6. Start HBase
> +
> +### Restart NameNode and DataNode with jsvc
> +```
> +sbin/stop-dfs.sh                      # stop HDFS first
> +
> +sbin/hadoop-daemon.sh start namenode  # start the NameNode
> +sbin/start-secure-dns.sh              # start the secure DataNode
> +```
> +
> +### Start HBase
> +```
> +bin/start-hbase.sh
> +```
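> +
> +With the daemons up, a Java client can authenticate before touching HBase.
> +A minimal sketch, assuming the HBase client jars on the classpath and the
> +keytab/principal from the configuration above:
> +
> +```
> +import org.apache.hadoop.conf.Configuration;
> +import org.apache.hadoop.hbase.HBaseConfiguration;
> +import org.apache.hadoop.security.UserGroupInformation;
> +
> +public class SecureHBaseLogin {
> +    public static void main(String[] args) throws Exception {
> +        Configuration conf = HBaseConfiguration.create(); // picks up hbase-site.xml
> +        UserGroupInformation.setConfiguration(conf);
> +        // "_HOST" must be replaced with the actual hostname, as noted above.
> +        UserGroupInformation.loginUserFromKeytab(
> +                "hbase/localhost@HADOOP.COM", "/path/to/hbase.keytab");
> +        System.out.println("Authenticated as "
> +                + UserGroupInformation.getLoginUser().getUserName());
> +    }
> +}
> +```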
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
> ----------------------------------------------------------------------
> diff --git a/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch b/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
> new file mode 100644
> index 0000000..bef04b4
> --- /dev/null
> +++ b/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
> @@ -0,0 +1,136 @@
> +diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
> +index ca0fce2..b43476d 100644
> +--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
> ++++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
> +@@ -44,7 +44,8 @@
> +   public static String getKrb5LoginModuleName() {
> +     return System.getProperty("java.vendor").contains("IBM")
> +       ? "com.ibm.security.auth.module.Krb5LoginModule"
> +-      : "com.sun.security.auth.module.Krb5LoginModule";
> ++//      : "com.sun.security.auth.module.Krb5LoginModule";
> ++      :"org.apache.hadoop.has.client.HasLoginModule";
> +   }
> +
> +   public static Oid getOidInstance(String oidName)
> +diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
> +index 4f117fd..7a8fc43 100644
> +--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
> ++++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
> +@@ -88,8 +88,10 @@
> +   private static final float TICKET_RENEW_WINDOW = 0.80f;
> +   static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
> +   static final String HADOOP_PROXY_USER = "HADOOP_PROXY_USER";
> +-
> +-  /**
> ++  public static final String HADOOP_SECURITY_AUTHENTICATION_USE_HAS
> ++    = "hadoop.security.authentication.use.has";
> ++
> ++  /**
> +    * UgiMetrics maintains UGI activity statistics
> +    * and publishes them through the metrics interfaces.
> +    */
> +@@ -434,6 +436,8 @@ public String toString() {
> +       "hadoop-user-kerberos";
> +     private static final String KEYTAB_KERBEROS_CONFIG_NAME =
> +       "hadoop-keytab-kerberos";
> ++     private static final String HAS_KERBEROS_CONFIG_NAME =
> ++      "hadoop-has-kerberos";
> +
> +     private static final Map<String, String> BASIC_JAAS_OPTIONS =
> +       new HashMap<String,String>();
> +@@ -490,6 +494,29 @@ public String toString() {
> +       KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
> +       KEYTAB_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
> +     }
> ++
> ++    private static final Map<String, String> HAS_KERBEROS_OPTIONS =
> ++        new HashMap<String, String>();
> ++
> ++    static {
> ++      if (IBM_JAVA) {
> ++        HAS_KERBEROS_OPTIONS.put("useDefaultCcache", "true");
> ++      } else {
> ++        HAS_KERBEROS_OPTIONS.put("doNotPrompt", "true");
> ++        HAS_KERBEROS_OPTIONS.put("useTgtTicket", "true");
> ++        HAS_KERBEROS_OPTIONS.put("hadoopSecurityHas",
> conf.get("hadoop.security.has"));
> ++      }
> ++      HAS_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
> ++    }
> ++
> ++    private static final AppConfigurationEntry HAS_KERBEROS_LOGIN =
> ++      new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
> ++                                LoginModuleControlFlag.OPTIONAL,
> ++                                HAS_KERBEROS_OPTIONS);
> ++    private static final AppConfigurationEntry[] HAS_KERBEROS_CONF =
> ++      new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, HAS_KERBEROS_LOGIN,
> ++                                  HADOOP_LOGIN};
> ++
> +     private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN =
> +       new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
> +                                 LoginModuleControlFlag.REQUIRED,
> +@@ -520,11 +547,45 @@ public String toString() {
> +         }
> +         KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
> +         return KEYTAB_KERBEROS_CONF;
> ++      } else if(HAS_KERBEROS_CONFIG_NAME.equals(appName)) {
> ++        return HAS_KERBEROS_CONF;
> +       }
> +       return null;
> +     }
> +   }
> +
> ++  /**
> ++   * Log a user in from a tgt ticket.
> ++   * @throws IOException
> ++   */
> ++  @InterfaceAudience.Public
> ++  @InterfaceStability.Evolving
> ++  public synchronized
> ++  static void loginUserFromHas() throws IOException {
> ++    if (!isSecurityEnabled())
> ++      return;
> ++
> ++    Subject subject = new Subject();
> ++    LoginContext login;
> ++    long start = 0;
> ++    try {
> ++      login = newLoginContext(HadoopConfiguration.HAS_KERBEROS_CONFIG_NAME,
> ++            subject, new HadoopConfiguration());
> ++      start = Time.now();
> ++      login.login();
> ++      metrics.loginSuccess.add(Time.now() - start);
> ++      loginUser = new UserGroupInformation(subject);
> ++      loginUser.setLogin(login);
> ++      loginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
> ++    } catch (LoginException le) {
> ++      if (start > 0) {
> ++        metrics.loginFailure.add(Time.now() - start);
> ++      }
> ++      throw new IOException("Login failure for " + le, le);
> ++    }
> ++    LOG.info("Login successful for user " + loginUser.getUserName());
> ++  }
> ++
> +   private static String prependFileAuthority(String keytabPath) {
> +     return keytabPath.startsWith("file://") ? keytabPath
> +         : "file://" + keytabPath;
> +@@ -751,9 +812,16 @@ static void loginUserFromSubject(Subject subject) throws IOException {
> +       if (subject == null) {
> +         subject = new Subject();
> +       }
> +-      LoginContext login =
> +-          newLoginContext(authenticationMethod.getLoginAppName(),
> +-                          subject, new HadoopConfiguration());
> ++      LoginContext login = null;
> ++      if (authenticationMethod.equals(AuthenticationMethod.KERBEROS)
> ++        && conf.getBoolean(HADOOP_SECURITY_AUTHENTICATION_USE_HAS, false)) {
> ++        login = newLoginContext(HadoopConfiguration.HAS_KERBEROS_CONFIG_NAME,
> ++          subject, new HadoopConfiguration());
> ++      } else {
> ++        login = newLoginContext(authenticationMethod.getLoginAppName(),
> ++          subject, new HadoopConfiguration());
> ++      }
> ++
> +       login.login();
> +       UserGroupInformation realUser = new UserGroupInformation(subject);
> +       realUser.setLogin(login);
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/hive/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/hive/README.md b/has/supports/hive/README.md
> new file mode 100644
> index 0000000..2fa1195
> --- /dev/null
> +++ b/has/supports/hive/README.md
> @@ -0,0 +1,55 @@
> +Enable Hive
> +==============
> +
> +## Hive on HDFS
> +
> +### 1. Enabling Kerberos Authentication for HiveServer2
> +> Update hive-site.xml
> +```
> +<property>
> +  <name>hive.server2.authentication</name>
> +  <value>KERBEROS</value>
> +</property>
> +<property>
> +  <name>hive.server2.authentication.kerberos.principal</name>
> +  <value>hive/_HOST@HADOOP.COM</value>
> +</property>
> +<property>
> +  <name>hive.server2.authentication.kerberos.keytab</name>
> +  <value>/path/to/hive.keytab</value>
> +</property>
> +```
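> +
> +With Kerberos enabled, JDBC clients have to name the server principal in
> +the connection URL. A minimal sketch with the standard Hive JDBC driver,
> +assuming a ticket is already in the credential cache (kinit) and the host
> +placeholder is filled in:
> +
> +```
> +import java.sql.Connection;
> +import java.sql.DriverManager;
> +
> +public class HiveKerberosClient {
> +    public static void main(String[] args) throws Exception {
> +        Class.forName("org.apache.hive.jdbc.HiveDriver");
> +        String url = "jdbc:hive2://<host>:10000/default;principal=hive/_HOST@HADOOP.COM";
> +        try (Connection conn = DriverManager.getConnection(url)) {
> +            System.out.println("Connected to HiveServer2: " + !conn.isClosed());
> +        }
> +    }
> +}
> +```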
> +
> +### 2. Enable impersonation in HiveServer2
> +> Update hive-site.xml
> +```
> +<property>
> +  <name>hive.server2.enable.impersonation</name>
> +  <description>Enable user impersonation for HiveServer2</description>
> +  <value>true</value>
> +</property>
> +```
> +
> +> Update Hadoop's core-site.xml
> +```
> +<property>
> +  <name>hadoop.proxyuser.hive.hosts</name>
> +  <value>*</value>
> +</property>
> +<property>
> +  <name>hadoop.proxyuser.hive.groups</name>
> +  <value>*</value>
> +</property>
> +```
> +
> +### 3. Start Hive
> +> Start the services
> +```
> +hive --service metastore &
> +hive --service hiveserver2 &
> +```
> +
> +> Start the Hive shell
> +```
> +hive
> +```
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/oozie/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/oozie/README.md b/has/supports/oozie/README.md
> new file mode 100644
> index 0000000..4760f97
> --- /dev/null
> +++ b/has/supports/oozie/README.md
> @@ -0,0 +1,105 @@
> +Enable Oozie
> +===============
> +
> +## 1. Update oozie-site.xml
> +Add the following properties:
> +```
> +<property>
> +  <name>oozie.service.AuthorizationService.security.enabled</name>
> +  <value>true</value>
> +  <description>Specifies whether security (user name/admin role) is enabled or not.
> +   If it is disabled any user can manage the Oozie system and manage any job.</description>
> +</property>
> +
> +<property>
> +  <name>oozie.service.HadoopAccessorService.kerberos.enabled</name>
> +  <value>true</value>
> +</property>
> +
> +<property>
> +  <name>local.realm</name>
> +  <value>HADOOP.COM</value>
> +  <description>HAS Realm.</description>
> +</property>
> +
> +<property>
> +  <name>oozie.service.HadoopAccessorService.keytab.file</name>
> +  <value>/etc/oozie/conf/oozie.keytab</value>
> +  <description>The keytab of the Oozie service.</description>
> +</property>
> +
> +<property>
> +  <name>oozie.service.HadoopAccessorService.kerberos.principal</name>
> +  <value>oozie/_HOST@HADOOP.COM</value>
> +  <description>Principal of Oozie service.</description>
> +</property>
> +
> +<property>
> +  <name>oozie.authentication.kerberos.principal</name>
> +  <value>HTTP/_HOST@HADOOP.COM</value>
> +  <description>Must use the hostname of the Oozie Server.</description>
> +</property>
> +
> +<property>
> +  <name>oozie.authentication.kerberos.keytab</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +  <description>Location of the hdfs keytab file which contains the HTTP principal.</description>
> +</property>
> +
> +<property>
> +  <name>oozie.authentication.type</name>
> +  <value>kerberos</value>
> +  <description></description>
> +</property>
> +
> +<property>
> +  <name>oozie.authentication.kerberos.name.rules</name>
> +  <value>DEFAULT</value>
> +  <description>The mapping from principal names to local service user names.</description>
> +</property>
> +```
> +
> +> Note "_HOST" should be replaced with the specific hostname.
> +
> +## 2. Start Oozie
> +```
> +bin/oozied.sh start
> +```
> +
> +## 3. Use kinit to get a credential cache
> +
> +## 4. Use the Oozie command-line tool to check the status of Oozie:
> +```
> +bin/oozie.sh admin -oozie http://<host>:11000/oozie -status
> +```
> +
> +Expected output:
> +```
> +System mode: NORMAL
> +```
> +
> +## 5. Use curl to check the status of Oozie:
> +```
> +curl -i --negotiate -u : "http://<host>:11000/oozie/v1/admin/status"
> +```
> +
> +Expected output:
> +```
> +HTTP/1.1 401 Unauthorized
> +Server: Apache-Coyote/1.1
> +WWW-Authenticate: Negotiate
> +Set-Cookie: hadoop.auth=; Path=/; Expires=Thu, 01-Jan-1970 00:00:00 GMT; HttpOnly
> +Content-Type: text/html;charset=utf-8
> +Content-Length: 997
> +Date: Wed, 28 Jun 2017 03:45:28 GMT
> +
> +HTTP/1.1 200 OK
> +Server: Apache-Coyote/1.1
> +WWW-Authenticate: Negotiate YGoGCSqGSIb3EgECAgIAb1swWaADAgEFoQMCAQ+iTTBLoAMCARGiRARCzCqLa8uqKUk6UlJfN02KC79DDFpStTBieqHBfhYEm6S1GyrP29Sr3hC4lYl4U42NFSwTb/ySjqu3EpOhBJo5Bg4h
> +Set-Cookie: hadoop.auth="u=oozie&p=oozie/_HOST@EXAMPLE.COM&t=kerberos&e=
> 1498657528799&s=waJ0DZ80kcA2Gc9pYMNIGsIAC5Y="; Path=/; Expires=Wed,
> 28-Jun-2017 13:45:28 GMT; HttpOnly
> +Content-Type: application/json;charset=UTF-8
> +Content-Length: 23
> +Date: Wed, 28 Jun 2017 03:45:28 GMT
> +
> +{"systemMode":"NORMAL"}
> +```
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/phoenix/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/phoenix/README.md b/has/supports/phoenix/README.md
> new file mode 100644
> index 0000000..05755fb
> --- /dev/null
> +++ b/has/supports/phoenix/README.md
> @@ -0,0 +1,30 @@
> +Enable Phoenix
> +=================
> +
> +## 1. Use sqlline to connect to secure HBase
> +```
> +sqlline.py <zk_quorum>:<zk_port>:<zk_hbase_path>:<principal>:<keytab_file>
> +# An example:
> +sqlline.py localhost:2181:/hbase:hbase/localhost@EXAMPLE.COM:/home/hadoop/keytab/hbase.keytab
> +```
> +
> +## 2. Configure Phoenix Query Server
> +
> +### Update hbase-site.xml
> +Add the following properties:
> +```
> +<property>
> +    <name>phoenix.queryserver.kerberos.principal</name>
> +    <value>hbase/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<property>
> +    <name>phoenix.queryserver.keytab.file</name>
> +    <value>/home/hadoop/keytab/hbase.keytab</value>
> +</property>
> +```
> +
> +### Start Phoenix Query Server
> +```
> +queryserver.py start
> +```
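> +
> +Once the query server is up, the thin client can be used to verify the setup
> +(a sketch; host and port are the query server defaults):
> +```
> +sqlline-thin.py http://localhost:8765
> +```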
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/presto/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/presto/README.md b/has/supports/presto/README.md
> new file mode 100644
> index 0000000..244efe6
> --- /dev/null
> +++ b/has/supports/presto/README.md
> @@ -0,0 +1,24 @@
> +Enable Presto
> +================
> +
> +## 1. Hive Security Configuration
> +Update catalog/hive.properties and add the following properties:
> +```
> +# Config to connect to the kerberized Hive metastore
> +hive.metastore.authentication.type=KERBEROS
> +hive.metastore.service.principal=hbase/_HOST@HADOOP.COM
> +hive.metastore.client.principal=hbase/_HOST@HADOOP.COM
> +hive.metastore.client.keytab=/path/to/hbase.keytab
> +
> +# Config to connect to kerberized HDFS
> +hive.hdfs.authentication.type=KERBEROS
> +hive.hdfs.presto.principal=hbase/_HOST@HADOOP.COM
> +hive.hdfs.presto.keytab=/path/to/hbase.keytab
> +```
> +
> +> Note "_HOST" should be replaced with the specific hostname.
> +
> +## 2. Restart the Presto server
> +```
> +/bin/launcher restart
> +```
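> +
> +A quick check with the Presto CLI (a sketch; the server address, catalog and
> +schema are placeholders, and the CLI binary name depends on your install):
> +```
> +presto --server localhost:8080 --catalog hive --schema default --execute "SHOW TABLES;"
> +```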
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/spark/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/spark/README.md b/has/supports/spark/README.md
> new file mode 100644
> index 0000000..f08ce50
> --- /dev/null
> +++ b/has/supports/spark/README.md
> @@ -0,0 +1,26 @@
> +Enable Spark
> +===============
> +
> +## 1. Update spark-env.sh
> +```
> +SPARK_HISTORY_OPTS="-Dspark.history.kerberos.enabled=true \
> +-Dspark.history.kerberos.principal=<sp...@HADOOP.COM> \
> +-Dspark.history.kerberos.keytab=<keytab>"
> +```
> +
> +> Note "_HOST" should be replaced with the specific hostname.
> +
> +## 2. Spark-submit job
> +> Only YARN mode is supported
> +```
> +/bin/spark-submit \
> +  --keytab <keytab> \
> +  --principal <sp...@HADOOP.COM> \
> +  --class <main-class> \
> +  --master <master-url> \
> +  --deploy-mode <deploy-mode> \
> +  --conf <key>=<value> \
> +  ... # other options
> +  <application-jar> \
> +  <application-arguments>
> +```
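> +
> +With the spark-v2.1.1.patch below applied, the keytab and principal options
> +can be replaced by the new --use-has flag, which makes the submitter log in
> +through HAS instead (a sketch):
> +```
> +/bin/spark-submit \
> +  --use-has \
> +  --class <main-class> \
> +  --master yarn \
> +  <application-jar> \
> +  <application-arguments>
> +```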
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/spark/spark-v2.1.1.patch
> ----------------------------------------------------------------------
> diff --git a/has/supports/spark/spark-v2.1.1.patch b/has/supports/spark/spark-v2.1.1.patch
> new file mode 100644
> index 0000000..c7e40b7
> --- /dev/null
> +++ b/has/supports/spark/spark-v2.1.1.patch
> @@ -0,0 +1,51 @@
> +diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
> +index 443f1f5..1fc66f0 100644
> +--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
> ++++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
> +@@ -553,7 +553,9 @@ object SparkSubmit {
> +
> +     // assure a keytab is available from any place in a JVM
> +     if (clusterManager == YARN || clusterManager == LOCAL) {
> +-      if (args.principal != null) {
> ++      if (args.useHas) {
> ++        UserGroupInformation.loginUserFromHas()
> ++      } else if (args.principal != null) {
> +         require(args.keytab != null, "Keytab must be specified when principal is specified")
> +         if (!new File(args.keytab).exists()) {
> +           throw new SparkException(s"Keytab file: ${args.keytab} does not exist")
> +diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
> +index f1761e7..5e48419 100644
> +--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
> ++++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
> +@@ -78,6 +78,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
> +   var submissionToRequestStatusFor: String = null
> +   var useRest: Boolean = true // used internally
> +
> ++  var useHas: Boolean = false
> ++
> +   /** Default properties present in the currently defined defaults file. */
> +   lazy val defaultSparkProperties: HashMap[String, String] = {
> +     val defaultProperties = new HashMap[String, String]()
> +@@ -438,6 +440,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
> +       case USAGE_ERROR =>
> +         printUsageAndExit(1)
> +
> ++      case USE_HAS =>
> ++        useHas = true
> ++
> +       case _ =>
> +         throw new IllegalArgumentException(s"Unexpected argument '$opt'.")
> +     }
> +diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
> +index 6767cc5..49a7678 100644
> +--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
> ++++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
> +@@ -76,6 +76,8 @@ class SparkSubmitOptionParser {
> +   protected final String PRINCIPAL = "--principal";
> +   protected final String QUEUE = "--queue";
> +
> ++  protected final String USE_HAS = "--use-has";
> ++
> +   /**
> +    * This is the canonical list of spark-submit options. Each entry in
> the array contains the
> +    * different aliases for the same option; the first element of each
> entry is the "official"
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/thrift/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/thrift/README.md b/has/supports/thrift/README.md
> new file mode 100644
> index 0000000..db49d38
> --- /dev/null
> +++ b/has/supports/thrift/README.md
> @@ -0,0 +1,70 @@
> +Enable Thrift
> +================
> +
> +## 1. Enable HBase thrift2 server
> +
> +### Update hbase-site.xml
> +Add the following properties:
> +```
> +<property>
> +  <name>hbase.thrift.keytab.file</name>
> +  <value>/etc/hbase/conf/hbase.keytab</value>
> +</property>
> +<property>
> +  <name>hbase.thrift.kerberos.principal</name>
> +  <value>hbase/_HOST@HADOOP.COM</value>
> +</property>
> +```
> +
> +### Restart HBase
> +
> +### Start thrift server
> +```
> +hbase thrift2 start
> +```
> +
> +## 2. Write thrift client application
> +Use a keytab file to connect to the thrift server.
> +An example of a thrift client:
> +```Java
> +package com.example.thrifttest;
> +
> +import org.apache.hadoop.hbase.thrift.generated.Hbase;
> +import org.apache.hadoop.security.UserGroupInformation;
> +import org.apache.thrift.TException;
> +import org.apache.thrift.protocol.TBinaryProtocol;
> +import org.apache.thrift.protocol.TProtocol;
> +import org.apache.thrift.transport.TSocket;
> +import org.apache.thrift.transport.TTransport;
> +import org.apache.thrift.transport.TTransportException;
> +import java.io.IOException;
> +
> +public class Thrifttest {
> +    static {
> +        final String principal = "hbase/hostname@HADOOP.COM";
> +        final String keyTab = "/etc/hbase/conf/hbase.keytab";
> +        try {
> +            UserGroupInformation.loginUserFromKeytab(principal, keyTab);
> +        } catch (IOException e) {
> +            e.printStackTrace();
> +        }
> +    }
> +
> +    private void start()  {
> +        try {
> +            TTransport socket = new TSocket("192.168.x.xxx", 9090);
> +            // Open the transport before using the client.
> +            socket.open();
> +            TProtocol protocol = new TBinaryProtocol(socket, true, true);
> +            Hbase.Client client = new Hbase.Client(protocol);
> +        } catch (TTransportException e) {
> +            e.printStackTrace();
> +        } catch (TException e) {
> +            e.printStackTrace();
> +        }
> +    }
> +
> +    public static void main(String[] args) {
> +        Thrifttest c = new Thrifttest();
> +        c.start();
> +    }
> +}
> +```
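> +
> +To compile and run the example against the HBase classpath (a sketch; the
> +generated Hbase thrift classes come from the hbase-thrift module):
> +```
> +javac -d . -cp "$(hbase classpath)" Thrifttest.java
> +java -cp "$(hbase classpath):." com.example.thrifttest.Thrifttest
> +```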
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/zookeeper/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/zookeeper/README.md b/has/supports/zookeeper/README.md
> new file mode 100644
> index 0000000..edc7a0e
> --- /dev/null
> +++ b/has/supports/zookeeper/README.md
> @@ -0,0 +1,59 @@
> +Enable ZooKeeper
> +===================
> +
> +## 1. Create the dependency jars
> +```
> +cd HAS/supports/zookeeper
> +mvn clean package
> +```
> +
> +## 2. Copy the jars to ZooKeeper lib directory
> +```
> +cp HAS/supports/zookeeper/lib/* $ZOOKEEPER_HOME/lib/
> +```
> +
> +## 3. Copy the conf file to ZooKeeper conf directory
> +```
> +cp HAS/supports/zookeeper/conf/* $ZOOKEEPER_HOME/conf/
> +```
> +
> +## 4. Update ZooKeeper security configuration files
> +> Update $ZOO_CONF_DIR/jaas.conf
> +> Replace "_HOST" with the specific hostname for each host
> +```
> +Server {
> +  com.sun.security.auth.module.Krb5LoginModule required
> +  useKeyTab=true
> +  keyTab="/path/to/zookeeper.keytab"
> +  storeKey=true
> +  useTicketCache=true
> +  principal="zookeeper/_HOST@HADOOP.COM";
> +};
> +
> +Client {
> +  com.sun.security.auth.module.Krb5LoginModule required
> +  useKeyTab=true
> +  keyTab="/home/hdfs/keytab/hbase.keytab"
> +  storeKey=true
> +  useTicketCache=false
> +  principal="zookeeper/_HOST@HADOOP.COM";
> +};
> +```
> +
> +> Update conf/zoo.cfg
> +```
> +authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider
> +jaasLoginRenew=3600000
> +kerberos.removeHostFromPrincipal=true
> +kerberos.removeRealmFromPrincipal=true
> +```
> +
> +## 5. Verify the configuration
> +```
> +zkCli.sh -server hostname:port
> +create /znode1 data sasl:zookeeper:cdrwa
> +getAcl /znode1
> +```
> +
> +> The results from getAcl should show that the proper scheme and permissions were applied to the znode,
> +> e.g. 'sasl,'zookeeper
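> +
> +The same check can be done from the Java client API (a sketch, assuming the
> +JVM is started with the jaas.conf from step 4 on its login configuration):
> +```Java
> +import java.util.Collections;
> +import org.apache.zookeeper.CreateMode;
> +import org.apache.zookeeper.ZooDefs;
> +import org.apache.zookeeper.ZooKeeper;
> +import org.apache.zookeeper.data.ACL;
> +import org.apache.zookeeper.data.Id;
> +
> +public class SaslAclCheck {
> +    public static void main(String[] args) throws Exception {
> +        ZooKeeper zk = new ZooKeeper("hostname:2181", 30000, null);
> +        // Create a znode accessible only by the authenticated SASL identity.
> +        ACL acl = new ACL(ZooDefs.Perms.ALL, new Id("sasl", "zookeeper"));
> +        zk.create("/znode1", "data".getBytes(),
> +                Collections.singletonList(acl), CreateMode.PERSISTENT);
> +        // Print the ACL back, expecting the sasl scheme and zookeeper id.
> +        System.out.println(zk.getACL("/znode1", null));
> +        zk.close();
> +    }
> +}
> +```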
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/zookeeper/conf/jaas.conf
> ----------------------------------------------------------------------
> diff --git a/has/supports/zookeeper/conf/jaas.conf b/has/supports/zookeeper/conf/jaas.conf
> new file mode 100644
> index 0000000..62db69a
> --- /dev/null
> +++ b/has/supports/zookeeper/conf/jaas.conf
> @@ -0,0 +1,13 @@
> + Server {
> +      com.sun.security.auth.module.Krb5LoginModule required
> +      useKeyTab=true
> +      keyTab="/etc/zookeeper/zookeeper.keytab"
> +      storeKey=true
> +      useTicketCache=true
> +      principal="zookeeper/localhost@HADOOP.COM";
> +  };
> +
> +Client {
> +  org.apache.hadoop.has.client.HasLoginModule required
> +  useTgtTicket=true;
> +};
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/zookeeper/conf/java.env
> ----------------------------------------------------------------------
> diff --git a/has/supports/zookeeper/conf/java.env b/has/supports/zookeeper/conf/java.env
> new file mode 100644
> index 0000000..bb7098b
> --- /dev/null
> +++ b/has/supports/zookeeper/conf/java.env
> @@ -0,0 +1 @@
> +export JVMFLAGS="-Djava.security.auth.login.config=$ZOOKEEPER_HOME/conf/jaas.conf"
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/zookeeper/pom.xml
> ----------------------------------------------------------------------
> diff --git a/has/supports/zookeeper/pom.xml b/has/supports/zookeeper/pom.xml
> new file mode 100644
> index 0000000..d2cdc13
> --- /dev/null
> +++ b/has/supports/zookeeper/pom.xml
> @@ -0,0 +1,47 @@
> +<?xml version="1.0" encoding="UTF-8"?>
> +<project xmlns="http://maven.apache.org/POM/4.0.0"
> +         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
> +         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
> +  <modelVersion>4.0.0</modelVersion>
> +
> +  <parent>
> +    <groupId>org.apache.hadoop</groupId>
> +    <artifactId>has-project</artifactId>
> +    <version>1.0.0-SNAPSHOT</version>
> +  </parent>
> +
> +  <artifactId>zookeeper-dist</artifactId>
> +  <description>ZooKeeper dist</description>
> +  <name>ZooKeeper dist</name>
> +
> +  <dependencies>
> +    <dependency>
> +      <groupId>org.apache.hadoop</groupId>
> +      <artifactId>has-client</artifactId>
> +      <version>${project.version}</version>
> +    </dependency>
> +  </dependencies>
> +
> +  <build>
> +    <plugins>
> +      <plugin>
> +        <groupId>org.apache.maven.plugins</groupId>
> +        <artifactId>maven-dependency-plugin</artifactId>
> +        <executions>
> +          <execution>
> +            <id>copy</id>
> +            <phase>package</phase>
> +            <goals>
> +              <goal>copy-dependencies</goal>
> +            </goals>
> +            <configuration>
> +              <outputDirectory>lib</outputDirectory>
> +            </configuration>
> +          </execution>
> +        </executions>
> +      </plugin>
> +    </plugins>
> +  </build>
> +
> +
> +</project>
>
>



[08/10] directory-kerby git commit: Add the HAS project to Kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginModule.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginModule.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginModule.java
new file mode 100644
index 0000000..6c71236
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginModule.java
@@ -0,0 +1,491 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.has.client;
+
+import com.sun.security.auth.module.Krb5LoginModule;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.kerby.kerberos.kerb.ccache.Credential;
+import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import sun.security.jgss.krb5.Krb5Util;
+import sun.security.krb5.Credentials;
+import sun.security.krb5.KrbException;
+import sun.security.krb5.PrincipalName;
+
+import javax.security.auth.DestroyFailedException;
+import javax.security.auth.Subject;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.kerberos.KerberosPrincipal;
+import javax.security.auth.kerberos.KerberosTicket;
+import javax.security.auth.login.LoginException;
+import javax.security.auth.spi.LoginModule;
+import java.io.IOException;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Login with tgt ticket
+ * The client's TGT will be retrieved from the API of HasClient
+ */
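+// An illustrative JAAS entry for this module (a sketch: the option names are
+// the ones parsed in initialize() below; the HAS server address is a placeholder):
+//
+//   Client {
+//     org.apache.hadoop.has.client.HasLoginModule required
+//     useTgtTicket=true
+//     hadoopSecurityHas="<has-server-address>";
+//   };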
+//CHECKSTYLE.OFF
+public class HasLoginModule implements LoginModule {
+
+    public static final Logger LOG = LoggerFactory.getLogger(HasLoginModule.class);
+
+    Krb5LoginModule krb5LoginModule;
+
+    // initial state
+    private Subject subject;
+    private CallbackHandler callbackHandler;
+    private Map<String, Object> sharedState;
+    private Map<String, ?> options;
+
+    // configurable option
+    private boolean debug = false;
+    private boolean doNotPrompt = false;
+    private boolean useTgtTicket = false;
+    private String hadoopSecurityHas = null;
+    private String princName = null;
+
+    private boolean refreshKrb5Config = false;
+
+    // specify if initiator.
+    // perform authentication exchange if initiator
+    private boolean isInitiator = true;
+
+    // the authentication status
+    private boolean succeeded = false;
+    private boolean commitSucceeded = false;
+
+    private Credentials cred = null;
+
+    private PrincipalName principal = null;
+    private KerberosPrincipal kerbClientPrinc = null;
+    private KerberosTicket kerbTicket = null;
+    private StringBuffer krb5PrincName = null;
+    private boolean unboundServer = false;
+
+    /**
+     * Initialize this <code>LoginModule</code>.
+     * <p>
+     * <p>
+     *
+     * @param subject         the <code>Subject</code> to be authenticated. <p>
+     * @param callbackHandler a <code>CallbackHandler</code> for
+     *                        communication with the end user (prompting for
+     *                        usernames and passwords, for example). <p>
+     * @param sharedState     shared <code>LoginModule</code> state. <p>
+     * @param options         options specified in the login
+     *                        <code>Configuration</code> for this particular
+     *                        <code>LoginModule</code>.
+     */
+    public void initialize(Subject subject,
+                           CallbackHandler callbackHandler,
+                           Map<String, ?> sharedState,
+                           Map<String, ?> options) {
+
+        this.subject = subject;
+        this.callbackHandler = callbackHandler;
+        this.sharedState = (Map<String, Object>) sharedState;
+        this.options = options;
+
+        // initialize any configured options
+        useTgtTicket = "true".equalsIgnoreCase((String) options.get("useTgtTicket"));
+
+        if (useTgtTicket) {
+            debug = "true".equalsIgnoreCase((String) options.get("debug"));
+            doNotPrompt = "true".equalsIgnoreCase((String) options.get("doNotPrompt"));
+            useTgtTicket = "true".equalsIgnoreCase((String) options.get("useTgtTicket"));
+            hadoopSecurityHas = (String) options.get("hadoopSecurityHas");
+            princName = (String) options.get("principal");
+            refreshKrb5Config =
+                "true".equalsIgnoreCase((String) options.get("refreshKrb5Config"));
+
+            // check isInitiator value
+            String isInitiatorValue = ((String) options.get("isInitiator"));
+            if (isInitiatorValue != null) {
+                // use default, if value not set
+                isInitiator = "true".equalsIgnoreCase(isInitiatorValue);
+            }
+
+            if (debug) {
+                System.out.print("Debug is  " + debug
+                    + " doNotPrompt " + doNotPrompt
+                    + " isInitiator " + isInitiator
+                    + " refreshKrb5Config is " + refreshKrb5Config
+                    + " principal is " + princName + "\n");
+            }
+        } else {
+            krb5LoginModule = new Krb5LoginModule();
+            krb5LoginModule.initialize(subject, callbackHandler, sharedState, options);
+        }
+    }
+
+    /**
+     * Authenticate the user
+     * <p>
+     * <p>
+     *
+     * @return true in all cases since this <code>LoginModule</code>
+     * should not be ignored.
+     * @throws LoginException       if this <code>LoginModule</code>
+     *                              is unable to perform the authentication.
+     */
+    public boolean login() throws LoginException {
+
+        if (useTgtTicket) {
+            if (refreshKrb5Config) {
+                try {
+                    if (debug) {
+                        System.out.println("Refreshing Kerberos configuration");
+                    }
+                    sun.security.krb5.Config.refresh();
+                } catch (KrbException ke) {
+                    LoginException le = new LoginException(ke.getMessage());
+                    le.initCause(ke);
+                    throw le;
+                }
+            }
+            String principalProperty = System.getProperty("sun.security.krb5.principal");
+            if (principalProperty != null) {
+                krb5PrincName = new StringBuffer(principalProperty);
+            } else {
+                if (princName != null) {
+                    krb5PrincName = new StringBuffer(princName);
+                }
+            }
+
+            validateConfiguration();
+
+            if (krb5PrincName != null && krb5PrincName.toString().equals("*")) {
+                unboundServer = true;
+            }
+
+            // attempt the authentication by getting the username and pwd
+            // by prompting or configuration i.e. not from shared state
+
+            try {
+                attemptAuthentication(false);
+                succeeded = true;
+                cleanState();
+                return true;
+            } catch (LoginException e) {
+                // authentication failed -- clean out state
+                if (debug) {
+                    System.out.println("\t\t[HasLoginModule] "
+                        + "authentication failed \n"
+                        + e.getMessage());
+                }
+                succeeded = false;
+                cleanState();
+                throw e;
+            }
+        } else {
+            succeeded = krb5LoginModule.login();
+            return succeeded;
+        }
+    }
+
+    /**
+     * Process the configuration options
+     * Get the TGT from Has Client
+     */
+
+    private void attemptAuthentication(boolean getPasswdFromSharedState)
+        throws LoginException {
+
+        /*
+         * Check the creds cache to see whether
+         * we have TGT for this client principal
+         */
+        if (krb5PrincName != null) {
+            try {
+                principal = new PrincipalName(krb5PrincName.toString(),
+                        PrincipalName.KRB_NT_PRINCIPAL);
+            } catch (KrbException e) {
+                LoginException le = new LoginException(e.getMessage());
+                le.initCause(e);
+                throw le;
+            }
+        }
+
+        try {
+            if (useTgtTicket) {
+                if (debug) {
+                    System.out.println("use tgt ticket to login, acquire TGT TICKET...");
+                }
+
+                HasClient hasClient = new HasClient(hadoopSecurityHas);
+                TgtTicket tgtTicket = null;
+                try {
+                    tgtTicket = hasClient.requestTgt();
+                } catch (HasException e) {
+                    LoginException le = new LoginException(e.getMessage());
+                    le.initCause(e);
+                    throw le;
+                }
+                Credential credential = new Credential(tgtTicket);
+                boolean[] flags = new boolean[7];
+                int flag = credential.getTicketFlags().getFlags();
+                for (int i = 6; i >= 0; i--) {
+                    flags[i] = (flag & (1 << i)) != 0;
+                }
+                Date startTime = null;
+                if (credential.getStartTime() != null) {
+                    startTime = credential.getStartTime().getValue();
+                }
+                cred = new Credentials(credential.getTicket().encode(),
+                    credential.getClientName().getName(),
+                    credential.getServerName().getName(),
+                    credential.getKey().getKeyData(),
+                    credential.getKey().getKeyType().getValue(),
+                    flags,
+                    credential.getAuthTime().getValue(),
+                    startTime,
+                    credential.getEndTime().getValue(),
+                    credential.getRenewTill().getValue(),
+                    null);
+
+                if (cred != null) {
+                    // get the principal name from the ticket cache
+                    if (principal == null) {
+                        principal = cred.getClient();
+                    }
+                }
+                if (debug) {
+                    System.out.println("Principal is " + principal);
+                    if (cred == null) {
+                        System.out.println("null credentials from TGT Ticket");
+                    }
+                }
+            }
+        } catch (KrbException e) {
+            LoginException le = new LoginException(e.getMessage());
+            le.initCause(e);
+            throw le;
+        } catch (IOException ioe) {
+            LoginException ie = new LoginException(ioe.getMessage());
+            ie.initCause(ioe);
+            throw ie;
+        }
+    }
+
+    private void validateConfiguration() throws LoginException {
+        if (doNotPrompt && !useTgtTicket) {
+            throw new LoginException("Configuration Error"
+                + " - either doNotPrompt should be "
+                + " false or"
+                + " useTgtTicket"
+                + " should be true");
+        }
+
+        if (krb5PrincName != null && krb5PrincName.toString().equals("*")) {
+            if (isInitiator) {
+                throw new LoginException("Configuration Error"
+                        + " - principal cannot be * when isInitiator is true");
+            }
+        }
+    }
+
+    /**
+     * <p> This method is called if the LoginContext's
+     * overall authentication succeeded
+     *
+     * @return true if this LoginModule's own login and commit
+     * attempts succeeded, or false otherwise.
+     * @throws LoginException if the commit fails.
+     */
+
+    public boolean commit() throws LoginException {
+        if (debug) {
+            System.out.println("Login success? " + succeeded);
+        }
+
+        if (useTgtTicket) {
+        /*
+         * Let us add the Krb5 Creds to the Subject's
+         * private credentials. The credentials are of type
+         * KerberosKey or KerberosTicket
+         */
+            if (succeeded == false) {
+                return false;
+            } else {
+
+                if (isInitiator && (cred == null)) {
+                    succeeded = false;
+                    throw new LoginException("Null Client Credential");
+                }
+
+                if (subject.isReadOnly()) {
+                    cleanKerberosCred();
+                    throw new LoginException("Subject is Readonly");
+                }
+
+            /*
+             * Add the Principal (authenticated identity)
+             * to the Subject's principal set and
+             * add the credentials (TGT or Service key) to the
+             * Subject's private credentials
+             */
+
+                Set<Object> privCredSet = subject.getPrivateCredentials();
+                Set<java.security.Principal> princSet = subject.getPrincipals();
+                kerbClientPrinc = new KerberosPrincipal(principal.getName());
+
+                // create Kerberos Ticket
+                if (isInitiator) {
+                    kerbTicket = Krb5Util.credsToTicket(cred);
+                }
+
+                // Let us add the kerbClientPrinc,kerbTicket
+
+                // We won't add "*" as a KerberosPrincipal
+                if (!unboundServer
+                    && !princSet.contains(kerbClientPrinc)) {
+                    princSet.add(kerbClientPrinc);
+                }
+
+                // add the TGT
+                if (kerbTicket != null) {
+                    if (!privCredSet.contains(kerbTicket)) {
+                        privCredSet.add(kerbTicket);
+                    }
+                }
+            }
+            commitSucceeded = true;
+            if (debug) {
+                System.out.println("Commit Succeeded \n");
+            }
+            return true;
+        } else {
+            return krb5LoginModule.commit();
+        }
+    }
+
+    /**
+     * <p> This method is called if the LoginContext's
+     * overall authentication failed.
+     *
+     * @return false if this LoginModule's own login and/or commit attempts
+     * failed, and true otherwise.
+     * @throws LoginException if the abort fails.
+     */
+
+    public boolean abort() throws LoginException {
+        if (useTgtTicket) {
+            if (succeeded == false) {
+                return false;
+            } else if (succeeded == true && commitSucceeded == false) {
+                // login succeeded but overall authentication failed
+                succeeded = false;
+                cleanKerberosCred();
+            } else {
+                // overall authentication succeeded and commit succeeded,
+                // but someone else's commit failed
+                logout();
+            }
+            return true;
+        } else {
+            return krb5LoginModule.abort();
+        }
+    }
+
+    /**
+     * Logout the user.
+     * <p>
+     * <p> This method removes the <code>Krb5Principal</code>
+     * that was added by the <code>commit</code> method.
+     * <p>
+     * <p>
+     *
+     * @return true in all cases since this <code>LoginModule</code>
+     * should not be ignored.
+     * @throws LoginException if the logout fails.
+     */
+    public boolean logout() throws LoginException {
+
+        if (useTgtTicket) {
+            if (debug) {
+                System.out.println("\t\t[Krb5LoginModule]: "
+                    + "Entering logout");
+            }
+
+            if (subject.isReadOnly()) {
+                cleanKerberosCred();
+                throw new LoginException("Subject is Readonly");
+            }
+
+            subject.getPrincipals().remove(kerbClientPrinc);
+            // Let us remove all Kerberos credentials stored in the Subject
+            Iterator<Object> it = subject.getPrivateCredentials().iterator();
+            while (it.hasNext()) {
+                Object o = it.next();
+                if (o instanceof KerberosTicket) {
+                    it.remove();
+                }
+            }
+            // clean the kerberos ticket and keys
+            cleanKerberosCred();
+
+            succeeded = false;
+            commitSucceeded = false;
+            if (debug) {
+                System.out.println("\t\t[HasLoginModule]: "
+                    + "logged out Subject");
+            }
+            return true;
+        } else {
+            return krb5LoginModule.logout();
+        }
+    }
+
+    /**
+     * Clean Kerberos credentials
+     */
+    private void cleanKerberosCred() throws LoginException {
+        // Clean the ticket and server key
+        try {
+            if (kerbTicket != null) {
+                kerbTicket.destroy();
+            }
+        } catch (DestroyFailedException e) {
+            throw new LoginException("Destroy Failed on Kerberos Private Credentials");
+        }
+        kerbTicket = null;
+        kerbClientPrinc = null;
+    }
+
+    /**
+     * Clean out the state
+     */
+    private void cleanState() {
+
+        if (!succeeded) {
+            // remove temp results for the next try
+            principal = null;
+        }
+        if (krb5PrincName != null && krb5PrincName.length() != 0) {
+            krb5PrincName.delete(0, krb5PrincName.length());
+        }
+        krb5PrincName = null;
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-client/src/main/resources/ssl-client.conf.template
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/resources/ssl-client.conf.template b/has/has-client/src/main/resources/ssl-client.conf.template
new file mode 100644
index 0000000..c5ca70a
--- /dev/null
+++ b/has/has-client/src/main/resources/ssl-client.conf.template
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+ssl.client.truststore.location = _location_
+ssl.client.truststore.password = _password_

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-common/pom.xml b/has/has-common/pom.xml
new file mode 100644
index 0000000..3046871
--- /dev/null
+++ b/has/has-common/pom.xml
@@ -0,0 +1,67 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>has-project</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>has-common</artifactId>
+  <description>HAS common</description>
+  <name>HAS common</name>
+  <packaging>jar</packaging>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>token-provider</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerby-config</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-client-api-all</artifactId>
+      <version>${kerby.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-annotations</artifactId>
+      <version>3.0.0-alpha2</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-codec</groupId>
+      <artifactId>commons-codec</artifactId>
+      <version>1.4</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>22.0-rc1-android</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>1.2</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>${slf4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>4.12</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/HasAdmin.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/HasAdmin.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/HasAdmin.java
new file mode 100644
index 0000000..94dc5df
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/HasAdmin.java
@@ -0,0 +1,140 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License. 
+ *
+ */
+package org.apache.hadoop.has.common;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ * Server-side admin facilities used remotely, similar to MIT kadmin in remote mode.
+ */
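+// Typical usage (a sketch; LocalHasAdmin is one implementation of this interface):
+//   HasAdmin admin = ...;
+//   admin.addPrincipal("alice", "mypassword");
+//   admin.exportKeytab(new File("/etc/has/alice.keytab"), "alice");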
+public interface HasAdmin {
+
+    /**
+     * Get the hadmin principal name.
+     *
+     * @return The hadmin principal name.
+     */
+    String getHadminPrincipal();
+
+    /**
+     * Add principal to backend.
+     *
+     * @param principal The principal to be added into backend
+     * @throws HasException e
+     */
+    void addPrincipal(String principal) throws HasException;
+
+    /**
+     * Add principal to backend.
+     *
+     * @param principal The principal to be added into backend
+     * @param password  The password to create encryption key
+     * @throws HasException e
+     */
+    void addPrincipal(String principal, String password) throws HasException;
+
+    /**
+     * Export all the keys of the specified principal into the specified keytab
+     * file.
+     *
+     * @param keytabFile The keytab file
+     * @param principal The principal name
+     * @throws HasException e
+     */
+    void exportKeytab(File keytabFile, String principal) throws HasException;
+
+    /**
+     * Export all the keys of the specified principals into the specified keytab
+     * file.
+     *
+     * @param keytabFile The keytab file
+     * @param principals The principal names
+     * @throws HasException e
+     */
+    void exportKeytab(File keytabFile, List<String> principals) throws HasException;
+
+    /**
+     * Delete the principal in backend.
+     *
+     * @param principal The principal to be deleted from backend
+     * @throws HasException e
+     */
+    void deletePrincipal(String principal) throws HasException;
+
+    /**
+     * Rename the principal.
+     *
+     * @param oldPrincipalName The original principal name
+     * @param newPrincipalName The new principal name
+     * @throws HasException e
+     */
+    void renamePrincipal(String oldPrincipalName,
+                         String newPrincipalName) throws HasException;
+
+    /**
+     * Get all the principal names from backend.
+     *
+     * @return principal list
+     * @throws HasException e
+     */
+    List<String> getPrincipals() throws HasException;
+
+    /**
+     * Get all the principal names that match the pattern.
+     *
+     * @param globString The glob string for matching
+     * @return Principal names
+     * @throws HasException e
+     */
+    List<String> getPrincipals(String globString) throws HasException;
+
+    /**
+     * Change the password of specified principal.
+     *
+     * @param principal The principal whose password is to be updated
+     * @param newPassword The new password
+     * @throws HasException e
+     */
+//    void changePassword(String principal, String newPassword) throws HasException;
+
+    /**
+     * Update the random keys of specified principal.
+     *
+     * @param principal The principal whose keys are to be updated
+     * @throws HasException e
+     */
+//    void updateKeys(String principal) throws HasException;
+
+    /**
+     * Release any resources associated.
+     *
+     * @throws HasException e
+     */
+//    void release() throws HasException;
+
+    String addPrincByRole(String host, String role) throws HasException;
+
+    File getKeytabByHostAndRole(String host, String role) throws HasException;
+
+    int size() throws HasException;
+
+    void setEnableOfConf(String isEnable) throws HasException;
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfig.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfig.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfig.java
new file mode 100644
index 0000000..3fc0998
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfig.java
@@ -0,0 +1,103 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.common;
+
+import org.apache.kerby.kerberos.kerb.common.Krb5Conf;
+
+import java.io.File;
+
+/**
+ * HAS configuration API.
+ */
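+// Keys are resolved against the "HAS" section of the config file, using the
+// lower-cased key name as the property name (a sketch; values are placeholders):
+//   [HAS]
+//     https_host = localhost
+//     https_port = 8092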
+public class HasConfig extends Krb5Conf {
+    private File confDir;
+
+    public void setConfDir(File dir) {
+        this.confDir = dir;
+    }
+
+    public File getConfDir() {
+        return confDir;
+    }
+
+    public String getHttpsHost() {
+        return getString(HasConfigKey.HTTPS_HOST, false, "HAS");
+    }
+
+    public String getHttpsPort() {
+        return getString(HasConfigKey.HTTPS_PORT, false, "HAS");
+    }
+
+    public String getHttpHost() {
+        return getString(HasConfigKey.HTTP_HOST, false, "HAS");
+    }
+
+    public String getHttpPort() {
+        return getString(HasConfigKey.HTTP_PORT, false, "HAS");
+    }
+
+    public String getPluginName() {
+        return getString(HasConfigKey.AUTH_TYPE, true, "PLUGIN");
+    }
+
+    public String getRealm() {
+        return getString(HasConfigKey.REALM, false, "HAS");
+    }
+
+    public String getSslServerConf() {
+        return getString(HasConfigKey.SSL_SERVER_CONF, true, "HAS");
+    }
+
+    public String getSslClientConf() {
+        return getString(HasConfigKey.SSL_CLIENT_CONF, true, "HAS");
+    }
+
+    public String getFilterAuthType() {
+        return getString(HasConfigKey.FILTER_AUTH_TYPE, true, "HAS");
+    }
+
+    public String getKerberosPrincipal() {
+        return getString(HasConfigKey.KERBEROS_PRINCIPAL, false, "HAS");
+    }
+
+    public String getKerberosKeytab() {
+        return getString(HasConfigKey.KERBEROS_KEYTAB, false, "HAS");
+    }
+
+    public String getKerberosNameRules() {
+        return getString(HasConfigKey.KERBEROS_NAME_RULES, false, "HAS");
+    }
+
+    public String getAdminKeytab() {
+        return getString(HasConfigKey.ADMIN_KEYTAB, false, "HAS");
+    }
+
+    public String getAdminKeytabPrincipal() {
+        return getString(HasConfigKey.ADMIN_KEYTAB_PRINCIPAL, false, "HAS");
+    }
+
+    public String getEnableConf() {
+        return getString(HasConfigKey.ENABLE_CONF, false, "HAS");
+    }
+
+    public String getSslClientCert() {
+        return getString(HasConfigKey.SSL_CLIENT_CERT, true, "HAS");
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfigKey.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfigKey.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfigKey.java
new file mode 100644
index 0000000..07db8d4
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/HasConfigKey.java
@@ -0,0 +1,61 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.common;
+
+import org.apache.kerby.config.ConfigKey;
+
+public enum HasConfigKey implements ConfigKey {
+    HTTP_HOST,
+    HTTP_PORT,
+    HTTPS_HOST,
+    HTTPS_PORT,
+    AUTH_TYPE("RAM"),
+    REALM,
+    ENABLE_CONF,
+    SSL_SERVER_CONF("/etc/has/ssl-server.conf"),
+    SSL_CLIENT_CONF("/etc/has/ssl-client.conf"),
+    SSL_CLIENT_CERT("/etc/has/cert-signed"),
+    FILTER_AUTH_TYPE("kerberos"),
+    KERBEROS_PRINCIPAL,
+    KERBEROS_KEYTAB,
+    KERBEROS_NAME_RULES,
+    ADMIN_KEYTAB,
+    ADMIN_KEYTAB_PRINCIPAL;
+
+    private Object defaultValue;
+
+    HasConfigKey() {
+        this.defaultValue = null;
+    }
+
+    HasConfigKey(Object defaultValue) {
+        this.defaultValue = defaultValue;
+    }
+
+    @Override
+    public String getPropertyKey() {
+        return name().toLowerCase();
+    }
+
+    @Override
+    public Object getDefaultValue() {
+        return this.defaultValue;
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/HasException.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/HasException.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/HasException.java
new file mode 100644
index 0000000..f8fc3b3
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/HasException.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.has.common;
+
+public class HasException extends Exception {
+
+    private static final long serialVersionUID = -1916788959202646914L;
+
+    /**
+     * Creates an {@link HasException}.
+     *
+     * @param cause original exception.
+     */
+    public HasException(Throwable cause) {
+        super(cause);
+    }
+
+    /**
+     * Creates an {@link HasException}.
+     *
+     * @param message exception message.
+     */
+    public HasException(String message) {
+        super(message);
+    }
+
+    /**
+     * Creates an {@link HasException}.
+     *
+     * @param message exception message.
+     * @param cause   original exception.
+     */
+    public HasException(String message, Throwable cause) {
+        super(message, cause);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthToken.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthToken.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthToken.java
new file mode 100644
index 0000000..c7a18da
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthToken.java
@@ -0,0 +1,217 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.hadoop.has.common.spnego;
+
+import java.security.Principal;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.StringTokenizer;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
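+// A serialized token has the same shape as the hadoop.auth cookie value seen
+// in the curl example earlier, e.g.:
+//   u=oozie&p=oozie/_HOST@EXAMPLE.COM&t=kerberos&e=1498657528799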
+public class AuthToken implements Principal {
+
+  /**
+   * Constant that identifies an anonymous request.
+   */
+
+  private static final String ATTR_SEPARATOR = "&";
+  private static final String USER_NAME = "u";
+  private static final String PRINCIPAL = "p";
+  private static final String EXPIRES = "e";
+  private static final String TYPE = "t";
+
+  private static final Set<String> ATTRIBUTES =
+    new HashSet<String>(Arrays.asList(USER_NAME, PRINCIPAL, EXPIRES, TYPE));
+
+  private String userName;
+  private String principal;
+  private String type;
+  private long expires;
+  private String tokenStr;
+
+  protected AuthToken() {
+    userName = null;
+    principal = null;
+    type = null;
+    expires = -1;
+    tokenStr = "ANONYMOUS";
+    generateToken();
+  }
+
+  private static final String ILLEGAL_ARG_MSG = " is NULL, empty or contains a '" + ATTR_SEPARATOR + "'";
+
+  /**
+   * Creates an authentication token.
+   *
+   * @param userName user name.
+   * @param principal principal (commonly matches the user name; with Kerberos it is the full/long
+   * principal name, while the userName is the short name).
+   * @param type the authentication mechanism name.
+   */
+  public AuthToken(String userName, String principal, String type) {
+    checkForIllegalArgument(userName, "userName");
+    checkForIllegalArgument(principal, "principal");
+    checkForIllegalArgument(type, "type");
+    this.userName = userName;
+    this.principal = principal;
+    this.type = type;
+    this.expires = -1;
+  }
+  
+  /**
+   * Check if the provided value is invalid. Throw an error if it is invalid, NOP otherwise.
+   * 
+   * @param value the value to check.
+   * @param name the parameter name to use in an error message if the value is invalid.
+   */
+  protected static void checkForIllegalArgument(String value, String name) {
+    if (value == null || value.length() == 0 || value.contains(ATTR_SEPARATOR)) {
+      throw new IllegalArgumentException(name + ILLEGAL_ARG_MSG);
+    }
+  }
+
+  /**
+   * Sets the expiration of the token.
+   *
+   * @param expires expiration time of the token in milliseconds since the epoch.
+   */
+  public void setExpires(long expires) {
+    this.expires = expires;
+    generateToken();
+  }
+
+  /**
+   * Returns true if the token has expired.
+   *
+   * @return true if the token has expired.
+   */
+  public boolean isExpired() {
+    return getExpires() != -1 && System.currentTimeMillis() > getExpires();
+  }
+
+  /**
+   * Generates the token.
+   */
+  private void generateToken() {
+    StringBuffer sb = new StringBuffer();
+    sb.append(USER_NAME).append("=").append(getUserName()).append(ATTR_SEPARATOR);
+    sb.append(PRINCIPAL).append("=").append(getName()).append(ATTR_SEPARATOR);
+    sb.append(TYPE).append("=").append(getType()).append(ATTR_SEPARATOR);
+    sb.append(EXPIRES).append("=").append(getExpires());
+    tokenStr = sb.toString();
+  }
+
+  /**
+   * Returns the user name.
+   *
+   * @return the user name.
+   */
+  public String getUserName() {
+    return userName;
+  }
+
+  /**
+   * Returns the principal name (this method name comes from the JDK {@link Principal} interface).
+   *
+   * @return the principal name.
+   */
+  @Override
+  public String getName() {
+    return principal;
+  }
+
+  /**
+   * Returns the authentication mechanism of the token.
+   *
+   * @return the authentication mechanism of the token.
+   */
+  public String getType() {
+    return type;
+  }
+
+  /**
+   * Returns the expiration time of the token.
+   *
+   * @return the expiration time of the token, in milliseconds since Epoc.
+   */
+  public long getExpires() {
+    return expires;
+  }
+
+  /**
+   * Returns the string representation of the token.
+   * <p>
+   * This string representation is parseable by the {@link #parse} method.
+   *
+   * @return the string representation of the token.
+   */
+  @Override
+  public String toString() {
+    return tokenStr;
+  }
+
+  public static AuthToken parse(String tokenStr) throws AuthenticationException {
+    if (tokenStr.length() >= 2) {
+      // strip the \" at the two ends of the tokenStr
+      if (tokenStr.charAt(0) == '\"'
+          && tokenStr.charAt(tokenStr.length() - 1) == '\"') {
+        tokenStr = tokenStr.substring(1, tokenStr.length() - 1);
+      }
+    }
+    Map<String, String> map = split(tokenStr);
+    // remove the signature part, since client doesn't care about it
+    map.remove("s");
+
+    if (!map.keySet().equals(ATTRIBUTES)) {
+      throw new AuthenticationException("Invalid token string, missing attributes");
+    }
+    long expires = Long.parseLong(map.get(EXPIRES));
+    AuthToken token = new AuthToken(map.get(USER_NAME), map.get(PRINCIPAL), map.get(TYPE));
+    token.setExpires(expires);
+    return token;
+  }
+
+  /**
+   * Splits the string representation of a token into attributes pairs.
+   *
+   * @param tokenStr string representation of a token.
+   *
+   * @return a map with the attribute pairs of the token.
+   *
+   * @throws AuthenticationException thrown if the string representation of the token could not be broken into
+   * attribute pairs.
+   */
+  private static Map<String, String> split(String tokenStr) throws AuthenticationException {
+    Map<String, String> map = new HashMap<String, String>();
+    StringTokenizer st = new StringTokenizer(tokenStr, ATTR_SEPARATOR);
+    while (st.hasMoreTokens()) {
+      String part = st.nextToken();
+      int separator = part.indexOf('=');
+      if (separator == -1) {
+        throw new AuthenticationException("Invalid authentication token");
+      }
+      String key = part.substring(0, separator);
+      String value = part.substring(separator + 1);
+      map.put(key, value);
+    }
+    return map;
+  }
+
+}

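A minimal sketch of the AuthToken round trip, assuming the attribute keys and
separator defined at the top of the class (the principal, type, and lifetime
below are illustrative only; exception handling is elided):

    AuthToken token = new AuthToken("alice", "alice@HADOOP.COM", "kerberos");
    token.setExpires(System.currentTimeMillis() + 3600 * 1000); // also builds tokenStr
    String wire = token.toString();           // attribute pairs joined by ATTR_SEPARATOR
    AuthToken parsed = AuthToken.parse(wire); // round-trips userName/principal/type/expires

Note that toString() returns null until setExpires() has been called, since
generateToken() only runs there.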
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticatedURL.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticatedURL.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticatedURL.java
new file mode 100644
index 0000000..ccd7ea4
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticatedURL.java
@@ -0,0 +1,282 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.hadoop.has.common.spnego;
+
+import org.apache.hadoop.has.common.util.ConnectionConfigurator;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ * <p>
+ * The authentication mechanisms supported by default are Hadoop Simple authentication
+ * (also known as pseudo authentication) and Kerberos SPNEGO authentication.
+ * <p>
+ * Additional authentication mechanisms can be supported via {@link Authenticator} implementations.
+ * <p>
+ * The default {@link Authenticator} is the {@link KerberosAuthenticator} class which supports
+ * automatic fallback from Kerberos SPNEGO to Hadoop Simple authentication.
+ * <p>
+ * <code>AuthenticatedURL</code> instances are not thread-safe.
+ * <p>
+ * The usage pattern of the {@link AuthenticatedURL} is:
+ * <pre>
+ *
+ * // establishing an initial connection
+ *
+ * URL url = new URL("http://foo:8080/bar");
+ * AuthenticatedURL.Token token = new AuthenticatedURL.Token();
+ * AuthenticatedURL aUrl = new AuthenticatedURL();
+ * HttpURLConnection conn = aUrl.openConnection(url, token);
+ * ....
+ * // use the 'conn' instance
+ * ....
+ *
+ * // establishing a follow up connection using a token from the previous connection
+ *
+ * HttpURLConnection conn = aUrl.openConnection(url, token);
+ * ....
+ * // use the 'conn' instance
+ * ....
+ *
+ * </pre>
+ */
+public class AuthenticatedURL {
+
+  /**
+   * Name of the HTTP cookie used for the authentication token between the client and the server.
+   */
+  public static final String AUTH_COOKIE = "hadoop.auth";
+
+  private static final String AUTH_COOKIE_EQ = AUTH_COOKIE + "=";
+
+  /**
+   * Client side authentication token.
+   */
+  public static class Token {
+
+    private String token;
+
+    /**
+     * Creates a token.
+     */
+    public Token() {
+    }
+
+    /**
+     * Creates a token using an existing string representation of the token.
+     *
+     * @param tokenStr string representation of the token.
+     */
+    public Token(String tokenStr) {
+      if (tokenStr == null) {
+        throw new IllegalArgumentException("tokenStr cannot be null");
+      }
+      set(tokenStr);
+    }
+
+    /**
+     * Returns whether a token from the server has been set.
+     *
+     * @return <code>true</code> if a token from the server has been set.
+     */
+    public boolean isSet() {
+      return token != null;
+    }
+
+    /**
+     * Sets a token.
+     *
+     * @param tokenStr string representation of the token.
+     */
+    void set(String tokenStr) {
+      token = tokenStr;
+    }
+
+    /**
+     * Returns the string representation of the token.
+     *
+     * @return the string representation of the token.
+     */
+    @Override
+    public String toString() {
+      return token;
+    }
+
+  }
+
+  private static Class<? extends Authenticator> defaultAuthenticator
+      = KerberosAuthenticator.class;
+
+  /**
+   * Sets the default {@link Authenticator} class to use when an {@link AuthenticatedURL} instance
+   * is created without specifying an authenticator.
+   *
+   * @param authenticator the authenticator class to use as default.
+   */
+  public static void setDefaultAuthenticator(Class<? extends Authenticator> authenticator) {
+    defaultAuthenticator = authenticator;
+  }
+
+  /**
+   * Returns the default {@link Authenticator} class to use when an {@link AuthenticatedURL} instance
+   * is created without specifying an authenticator.
+   *
+   * @return the authenticator class to use as default.
+   */
+  public static Class<? extends Authenticator> getDefaultAuthenticator() {
+    return defaultAuthenticator;
+  }
+
+  private Authenticator authenticator;
+  private ConnectionConfigurator connConfigurator;
+
+  /**
+   * Creates an {@link AuthenticatedURL}.
+   */
+  public AuthenticatedURL() {
+    this(null);
+  }
+
+  /**
+   * Creates an <code>AuthenticatedURL</code>.
+   *
+   * @param authenticator the {@link Authenticator} instance to use, if <code>null</code> a {@link
+   * KerberosAuthenticator} is used.
+   */
+  public AuthenticatedURL(Authenticator authenticator) {
+    this(authenticator, null);
+  }
+
+  /**
+   * Creates an <code>AuthenticatedURL</code>.
+   *
+   * @param authenticator the {@link Authenticator} instance to use, if <code>null</code> a {@link
+   * KerberosAuthenticator} is used.
+   * @param connConfigurator a connection configurator.
+   */
+  public AuthenticatedURL(Authenticator authenticator,
+                          ConnectionConfigurator connConfigurator) {
+    try {
+      this.authenticator = (authenticator != null) ? authenticator : defaultAuthenticator.newInstance();
+    } catch (Exception ex) {
+      throw new RuntimeException(ex);
+    }
+    this.connConfigurator = connConfigurator;
+    this.authenticator.setConnectionConfigurator(connConfigurator);
+  }
+
+  /**
+   * Returns the {@link Authenticator} instance used by the
+   * <code>AuthenticatedURL</code>.
+   *
+   * @return the {@link Authenticator} instance
+   */
+  protected Authenticator getAuthenticator() {
+    return authenticator;
+  }
+
+  /**
+   * Returns an authenticated {@link HttpURLConnection}.
+   *
+   * @param url the URL to connect to. Only HTTP/S URLs are supported.
+   * @param token the authentication token being used for the user.
+   *
+   * @return an authenticated {@link HttpURLConnection}.
+   *
+   * @throws IOException if an IO error occurred.
+   * @throws AuthenticationException if an authentication exception occurred.
+   */
+  public HttpURLConnection openConnection(URL url, Token token) throws IOException, AuthenticationException {
+    if (url == null) {
+      throw new IllegalArgumentException("url cannot be NULL");
+    }
+    if (!url.getProtocol().equalsIgnoreCase("http") && !url.getProtocol().equalsIgnoreCase("https")) {
+      throw new IllegalArgumentException("url must be for a HTTP or HTTPS resource");
+    }
+    if (token == null) {
+      throw new IllegalArgumentException("token cannot be NULL");
+    }
+    authenticator.authenticate(url, token);
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    if (connConfigurator != null) {
+      conn = connConfigurator.configure(conn);
+    }
+    injectToken(conn, token);
+    return conn;
+  }
+
+  /**
+   * Helper method that injects an authentication token to send with a connection.
+   *
+   * @param conn connection to inject the authentication token into.
+   * @param token authentication token to inject.
+   */
+  public static void injectToken(HttpURLConnection conn, Token token) {
+    String t = token.token;
+    if (t != null) {
+      if (!t.startsWith("\"")) {
+        t = "\"" + t + "\"";
+      }
+      conn.addRequestProperty("Cookie", AUTH_COOKIE_EQ + t);
+    }
+  }
+
+  /**
+   * Helper method that extracts an authentication token received from a connection.
+   * <p>
+   * This method is used by {@link Authenticator} implementations.
+   *
+   * @param conn connection to extract the authentication token from.
+   * @param token the authentication token.
+   *
+   * @throws IOException if an IO error occurred.
+   * @throws AuthenticationException if an authentication exception occurred.
+   */
+  public static void extractToken(HttpURLConnection conn, Token token) throws IOException, AuthenticationException {
+    int respCode = conn.getResponseCode();
+    if (respCode == HttpURLConnection.HTTP_OK
+        || respCode == HttpURLConnection.HTTP_CREATED
+        || respCode == HttpURLConnection.HTTP_ACCEPTED) {
+      Map<String, List<String>> headers = conn.getHeaderFields();
+      List<String> cookies = headers.get("Set-Cookie");
+      if (cookies != null) {
+        for (String cookie : cookies) {
+          if (cookie.startsWith(AUTH_COOKIE_EQ)) {
+            String value = cookie.substring(AUTH_COOKIE_EQ.length());
+            int separator = value.indexOf(";");
+            if (separator > -1) {
+              value = value.substring(0, separator);
+            }
+            if (value.length() > 0) {
+              token.set(value);
+            }
+          }
+        }
+      }
+    } else {
+      token.set(null);
+      throw new AuthenticationException("Authentication failed, status: " + conn.getResponseCode()
+          + ", message: " + conn.getResponseMessage());
+    }
+  }
+
+}

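A minimal client-side sketch of the flow above (the endpoint URL is
hypothetical; any SPNEGO-protected HTTP/S resource applies, and exception
handling is elided):

    URL url = new URL("https://has.example.com:8443/has/v1/hostroles");
    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
    AuthenticatedURL aUrl = new AuthenticatedURL(); // defaults to KerberosAuthenticator
    HttpURLConnection conn = aUrl.openConnection(url, token);
    // extractToken() has filled 'token' from the hadoop.auth Set-Cookie header,
    // so later openConnection(url, token) calls reuse it without re-running
    // the SPNEGO handshake.

openConnection() validates the URL and token up front, lets the configured
Authenticator perform the handshake, then attaches the cookie via injectToken().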
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticationException.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticationException.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticationException.java
new file mode 100644
index 0000000..62a5d38
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/AuthenticationException.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.hadoop.has.common.spnego;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ * Exception thrown when an authentication error occurs.
+ */
+public class AuthenticationException extends Exception {
+  
+  static final long serialVersionUID = 0;
+
+  /**
+   * Creates an {@link AuthenticationException}.
+   *
+   * @param cause original exception.
+   */
+  public AuthenticationException(Throwable cause) {
+    super(cause);
+  }
+
+  /**
+   * Creates an {@link AuthenticationException}.
+   *
+   * @param msg exception message.
+   */
+  public AuthenticationException(String msg) {
+    super(msg);
+  }
+
+  /**
+   * Creates an {@link AuthenticationException}.
+   *
+   * @param msg exception message.
+   * @param cause original exception.
+   */
+  public AuthenticationException(String msg, Throwable cause) {
+    super(msg, cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/Authenticator.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/Authenticator.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/Authenticator.java
new file mode 100644
index 0000000..91eb1a0
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/Authenticator.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.hadoop.has.common.spnego;
+
+import org.apache.hadoop.has.common.util.ConnectionConfigurator;
+
+import java.io.IOException;
+import java.net.URL;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ * Interface for client authentication mechanisms.
+ * <p>
+ * Implementations are use-once instances; they don't need to be thread-safe.
+ */
+public interface Authenticator {
+
+  /**
+   * Sets a {@link ConnectionConfigurator} instance to use for
+   * configuring connections.
+   *
+   * @param configurator the {@link ConnectionConfigurator} instance.
+   */
+  void setConnectionConfigurator(ConnectionConfigurator configurator);
+
+  /**
+   * Authenticates against a URL and returns a {@link AuthenticatedURL.Token} to be
+   * used by subsequent requests.
+   *
+   * @param url the URL to authenticate against.
+   * @param token the authentication token being used for the user.
+   *
+   * @throws IOException if an IO error occurred.
+   * @throws AuthenticationException if an authentication error occurred.
+   */
+  void authenticate(URL url, AuthenticatedURL.Token token) throws IOException, AuthenticationException;
+
+}

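As a sketch of how an additional mechanism would plug in (this no-op
implementation is illustrative only, not part of the patch; since
AuthenticatedURL.Token.set() is package-private, real implementations live in
this package):

    public class NoopAuthenticator implements Authenticator {
        @Override
        public void setConnectionConfigurator(ConnectionConfigurator configurator) {
            // no connection tuning needed for this sketch
        }

        @Override
        public void authenticate(URL url, AuthenticatedURL.Token token) {
            // a real mechanism would negotiate with the server here and
            // store the resulting token string via token.set(...)
        }
    }

AuthenticatedURL.setDefaultAuthenticator(NoopAuthenticator.class) would then
make it the default for instances created without an explicit authenticator.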
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosAuthenticator.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosAuthenticator.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosAuthenticator.java
new file mode 100644
index 0000000..c785430
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosAuthenticator.java
@@ -0,0 +1,359 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.hadoop.has.common.spnego;
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.has.common.util.ConnectionConfigurator;
+import org.ietf.jgss.GSSContext;
+import org.ietf.jgss.GSSManager;
+import org.ietf.jgss.GSSName;
+import org.ietf.jgss.Oid;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosKey;
+import javax.security.auth.kerberos.KerberosTicket;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.security.AccessControlContext;
+import java.security.AccessController;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.apache.hadoop.has.common.util.PlatformName.IBM_JAVA;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+/**
+ * The {@link KerberosAuthenticator} implements the Kerberos SPNEGO authentication sequence.
+ * <p>
+ * It uses the default principal for the Kerberos cache (normally set via kinit).
+ * <p>
+ */
+public class KerberosAuthenticator implements Authenticator {
+  
+  private static final Logger LOG = LoggerFactory.getLogger(KerberosAuthenticator.class);
+
+  /**
+   * HTTP header used by the SPNEGO server endpoint during an authentication sequence.
+   */
+  public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
+
+  /**
+   * HTTP header used by the SPNEGO client endpoint during an authentication sequence.
+   */
+  public static final String AUTHORIZATION = "Authorization";
+
+  /**
+   * HTTP header prefix used by the SPNEGO client/server endpoints during an authentication sequence.
+   */
+  public static final String NEGOTIATE = "Negotiate";
+
+  private static final String AUTH_HTTP_METHOD = "OPTIONS";
+
+  private static String keytabPrincipal = null;
+  private static String keytabFile = null;
+
+  /*
+   * Defines the Kerberos configuration that will be used to obtain the Kerberos principal from the
+   * Kerberos cache.
+   */
+  private static class KerberosConfiguration extends Configuration {
+
+    private static final String OS_LOGIN_MODULE_NAME;
+    private static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");
+    private static final boolean IS_64_BIT = System.getProperty("os.arch").contains("64");
+    private static final boolean AIX = System.getProperty("os.name").equals("AIX");
+
+    /* Return the OS login module class name */
+    private static String getOSLoginModuleName() {
+      if (IBM_JAVA) {
+        if (WINDOWS) {
+          return IS_64_BIT ? "com.ibm.security.auth.module.Win64LoginModule"
+              : "com.ibm.security.auth.module.NTLoginModule";
+        } else if (AIX) {
+          return IS_64_BIT ? "com.ibm.security.auth.module.AIX64LoginModule"
+              : "com.ibm.security.auth.module.AIXLoginModule";
+        } else {
+          return "com.ibm.security.auth.module.LinuxLoginModule";
+        }
+      } else {
+        return WINDOWS ? "com.sun.security.auth.module.NTLoginModule"
+            : "com.sun.security.auth.module.UnixLoginModule";
+      }
+    }
+
+    static {
+      OS_LOGIN_MODULE_NAME = getOSLoginModuleName();
+    }
+
+    private static final AppConfigurationEntry OS_SPECIFIC_LOGIN =
+      new AppConfigurationEntry(OS_LOGIN_MODULE_NAME,
+                                AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+                                new HashMap<String, String>());
+
+    private static final Map<String, String> KEYTAB_KERBEROS_OPTIONS
+        = new HashMap<String, String>();
+    static {
+      if (IBM_JAVA) {
+        KEYTAB_KERBEROS_OPTIONS.put("credsType", "both");
+        KEYTAB_KERBEROS_OPTIONS.put("useKeytab",
+            prependFileAuthority(keytabFile));
+      } else {
+        KEYTAB_KERBEROS_OPTIONS.put("doNotPrompt", "true");
+        KEYTAB_KERBEROS_OPTIONS.put("useKeyTab", "true");
+        KEYTAB_KERBEROS_OPTIONS.put("storeKey", "true");
+        KEYTAB_KERBEROS_OPTIONS.put("keyTab", keytabFile);
+      }
+      KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
+      KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
+      KEYTAB_KERBEROS_OPTIONS.put("debug", "false");
+    }
+
+    private static final AppConfigurationEntry USER_KERBEROS_LOGIN =
+      new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
+                                AppConfigurationEntry.LoginModuleControlFlag.OPTIONAL,
+                                KEYTAB_KERBEROS_OPTIONS);
+
+    private static final AppConfigurationEntry[] USER_KERBEROS_CONF =
+      new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, USER_KERBEROS_LOGIN};
+
+    @Override
+    public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
+      return USER_KERBEROS_CONF;
+    }
+
+    private static String prependFileAuthority(String keytabPath) {
+      return keytabPath.startsWith("file://") ? keytabPath
+          : "file://" + keytabPath;
+    }
+  }
+  
+  private URL url;
+  private HttpURLConnection conn;
+  private Base64 base64;
+  private ConnectionConfigurator connConfigurator;
+
+  /**
+   * Sets a {@link ConnectionConfigurator} instance to use for
+   * configuring connections.
+   *
+   * @param configurator the {@link ConnectionConfigurator} instance.
+   */
+  @Override
+  public void setConnectionConfigurator(ConnectionConfigurator configurator) {
+    connConfigurator = configurator;
+  }
+
+  /**
+   * Performs SPNEGO authentication against the specified URL.
+   * <p>
+   * If a token is given it does a NOP and returns the given token.
+   * <p>
+   * If no token is given, it will perform the SPNEGO authentication sequence using an
+   * HTTP <code>OPTIONS</code> request.
+   *
+   * @param url the URL to authenticate against.
+   * @param token the authentication token being used for the user.
+   *
+   * @throws IOException if an IO error occurred.
+   * @throws AuthenticationException if an authentication error occurred.
+   */
+  @Override
+  public void authenticate(URL url, AuthenticatedURL.Token token)
+    throws IOException, AuthenticationException {
+
+    if (!token.isSet()) {
+      this.url = url;
+      base64 = new Base64(0);
+      conn = (HttpURLConnection) url.openConnection();
+      if (connConfigurator != null) {
+        conn = connConfigurator.configure(conn);
+      }
+      conn.setRequestMethod(AUTH_HTTP_METHOD);
+      conn.connect();
+      
+      boolean needFallback = false;
+      if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
+        LOG.debug("JDK performed authentication on our behalf.");
+        // If the JDK already did the SPNEGO back-and-forth for
+        // us, just pull out the token.
+        AuthenticatedURL.extractToken(conn, token);
+        if (isTokenKerberos(token)) {
+          return;
+        }
+        needFallback = true;
+      }
+      if (!needFallback && isNegotiate()) {
+        LOG.debug("Performing our own SPNEGO sequence.");
+        doSpnegoSequence(token);
+      } else {
+        throw new IOException("Should perform our own SPNEGO sequence");
+      }
+    }
+  }
+
+  public void setKeyTab(String keytabFile, String keytabPrincipal) {
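+    // Note: these fields are static because KerberosConfiguration reads them
+    // in its static initializer, so the keytab must be configured before the
+    // first login attempt in this JVM.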
+    KerberosAuthenticator.keytabFile = keytabFile;
+    KerberosAuthenticator.keytabPrincipal = keytabPrincipal;
+  }
+
+  /*
+   * Check if the passed token is of type "kerberos" or "kerberos-dt"
+   */
+  private boolean isTokenKerberos(AuthenticatedURL.Token token)
+      throws AuthenticationException {
+    if (token.isSet()) {
+      AuthToken aToken = AuthToken.parse(token.toString());
+      if (aToken.getType().equals("kerberos")
+          || aToken.getType().equals("kerberos-dt")) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /*
+   * Indicates if the response is starting a SPNEGO negotiation.
+   */
+  private boolean isNegotiate() throws IOException {
+    boolean negotiate = false;
+    if (conn.getResponseCode() == HttpURLConnection.HTTP_UNAUTHORIZED) {
+      String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
+      negotiate = authHeader != null && authHeader.trim().startsWith(NEGOTIATE);
+    }
+    return negotiate;
+  }
+
+  /**
+   * Implements the SPNEGO authentication sequence interaction using the current default principal
+   * in the Kerberos cache (normally set via kinit).
+   *
+   * @param token the authentication token being used for the user.
+   *
+   * @throws IOException if an IO error occurred.
+   * @throws AuthenticationException if an authentication error occurred.
+   */
+  private void doSpnegoSequence(AuthenticatedURL.Token token) throws IOException, AuthenticationException {
+    try {
+      AccessControlContext context = AccessController.getContext();
+      Subject subject = Subject.getSubject(context);
+      if (subject == null
+          || (subject.getPrivateCredentials(KerberosKey.class).isEmpty()
+              && subject.getPrivateCredentials(KerberosTicket.class).isEmpty())) {
+        LOG.debug("No subject in context, logging in");
+        subject = new Subject();
+        LoginContext login = new LoginContext("", subject,
+            null, new KerberosConfiguration());
+        login.login();
+      }
+
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Using subject: " + subject);
+      }
+      Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
+
+        @Override
+        public Void run() throws Exception {
+          GSSContext gssContext = null;
+          try {
+            GSSManager gssManager = GSSManager.getInstance();
+            String servicePrincipal = KerberosUtil.getServicePrincipal("HTTP",
+                KerberosAuthenticator.this.url.getHost());
+            LOG.info("service principal is:" + servicePrincipal);
+            Oid oid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
+            GSSName serviceName = gssManager.createName(servicePrincipal,
+                                                        oid);
+            oid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
+            gssContext = gssManager.createContext(serviceName, oid, null,
+                                                  GSSContext.DEFAULT_LIFETIME);
+            gssContext.requestCredDeleg(true);
+            gssContext.requestMutualAuth(true);
+
+            byte[] inToken = new byte[0];
+            byte[] outToken;
+            boolean established = false;
+
+            // Loop while the context is still not established
+            while (!established) {
+              outToken = gssContext.initSecContext(inToken, 0, inToken.length);
+              if (outToken != null) {
+                sendToken(outToken);
+              }
+
+              if (!gssContext.isEstablished()) {
+                inToken = readToken();
+              } else {
+                established = true;
+              }
+            }
+          } finally {
+            if (gssContext != null) {
+              gssContext.dispose();
+              gssContext = null;
+            }
+          }
+          return null;
+        }
+      });
+    } catch (PrivilegedActionException ex) {
+      throw new AuthenticationException(ex.getException());
+    } catch (LoginException ex) {
+      throw new AuthenticationException(ex);
+    }
+    AuthenticatedURL.extractToken(conn, token);
+  }
+
+  /*
+   * Sends the Kerberos token to the server.
+   */
+  private void sendToken(byte[] outToken) throws IOException {
+    String token = base64.encodeToString(outToken);
+    conn = (HttpURLConnection) url.openConnection();
+    if (connConfigurator != null) {
+      conn = connConfigurator.configure(conn);
+    }
+    conn.setRequestMethod(AUTH_HTTP_METHOD);
+    conn.setRequestProperty(AUTHORIZATION, NEGOTIATE + " " + token);
+    conn.connect();
+  }
+
+  /*
+   * Retrieves the Kerberos token returned by the server.
+   */
+  private byte[] readToken() throws IOException, AuthenticationException {
+    int status = conn.getResponseCode();
+    if (status == HttpURLConnection.HTTP_OK || status == HttpURLConnection.HTTP_UNAUTHORIZED) {
+      String authHeader = conn.getHeaderField(WWW_AUTHENTICATE);
+      if (authHeader == null || !authHeader.trim().startsWith(NEGOTIATE)) {
+        throw new AuthenticationException("Invalid SPNEGO sequence, '" + WWW_AUTHENTICATE
+            + "' header incorrect: " + authHeader);
+      }
+      String negotiation = authHeader.trim().substring((NEGOTIATE + " ").length()).trim();
+      return base64.decode(negotiation);
+    }
+    throw new AuthenticationException("Invalid SPNEGO sequence, status code: " + status);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosHasAuthenticator.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosHasAuthenticator.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosHasAuthenticator.java
new file mode 100644
index 0000000..7db0551
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosHasAuthenticator.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.common.spnego;
+
+public class KerberosHasAuthenticator extends KerberosAuthenticator {
+
+    public KerberosHasAuthenticator(String keytabFile, String keytabPrincipal) {
+        setKeyTab(keytabFile, keytabPrincipal);
+    }
+}
\ No newline at end of file

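A sketch of keytab-based SPNEGO through this subclass (the keytab path,
principal, and endpoint are placeholders; exception handling is elided):

    Authenticator auth = new KerberosHasAuthenticator(
        "/etc/has/admin.keytab", "kadmin/HADOOP.COM@HADOOP.COM");
    AuthenticatedURL aUrl = new AuthenticatedURL(auth);
    HttpURLConnection conn = aUrl.openConnection(
        new URL("https://has.example.com:8443/has/v1/kdcinit"),
        new AuthenticatedURL.Token());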
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosUtil.java
----------------------------------------------------------------------
diff --git a/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosUtil.java b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosUtil.java
new file mode 100644
index 0000000..b6e330d
--- /dev/null
+++ b/has/has-common/src/main/java/org/apache/hadoop/has/common/spnego/KerberosUtil.java
@@ -0,0 +1,262 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.common.spnego;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;
+import java.util.regex.Pattern;
+
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
+import org.ietf.jgss.GSSException;
+import org.ietf.jgss.Oid;
+
+import javax.security.auth.Subject;
+import javax.security.auth.kerberos.KerberosTicket;
+import javax.security.auth.kerberos.KeyTab;
+
+import static org.apache.hadoop.has.common.util.PlatformName.IBM_JAVA;
+
+/**
+ * Borrowed from Apache Hadoop.
+ */
+
+public class KerberosUtil {
+
+  /* Return the Kerberos login module name */
+  public static String getKrb5LoginModuleName() {
+    return (IBM_JAVA)
+      ? "com.ibm.security.auth.module.Krb5LoginModule"
+      : "com.sun.security.auth.module.Krb5LoginModule";
+  }
+
+  public static Oid getOidInstance(String oidName)
+      throws ClassNotFoundException, GSSException, NoSuchFieldException,
+      IllegalAccessException {
+    Class<?> oidClass;
+    if (IBM_JAVA) {
+      if ("NT_GSS_KRB5_PRINCIPAL".equals(oidName)) {
+        // IBM JDK GSSUtil class does not have field for krb5 principal oid
+        return new Oid("1.2.840.113554.1.2.2.1");
+      }
+      oidClass = Class.forName("com.ibm.security.jgss.GSSUtil");
+    } else {
+      oidClass = Class.forName("sun.security.jgss.GSSUtil");
+    }
+    Field oidField = oidClass.getDeclaredField(oidName);
+    return (Oid) oidField.get(oidClass);
+  }
+
+  public static String getDefaultRealm() 
+      throws ClassNotFoundException, NoSuchMethodException, 
+      IllegalArgumentException, IllegalAccessException, 
+      InvocationTargetException {
+    Object kerbConf;
+    Class<?> classRef;
+    Method getInstanceMethod;
+    Method getDefaultRealmMethod;
+    if (IBM_JAVA) {
+      classRef = Class.forName("com.ibm.security.krb5.internal.Config");
+    } else {
+      classRef = Class.forName("sun.security.krb5.Config");
+    }
+    getInstanceMethod = classRef.getMethod("getInstance", new Class[0]);
+    kerbConf = getInstanceMethod.invoke(classRef, new Object[0]);
+    getDefaultRealmMethod = classRef.getDeclaredMethod("getDefaultRealm",
+        new Class[0]);
+    return (String) getDefaultRealmMethod.invoke(kerbConf, new Object[0]);
+  }
+
+  public static String getDefaultRealmProtected() {
+    String realmString = null;
+    try {
+      realmString = getDefaultRealm();
+    } catch (RuntimeException rte) {
+      //silently catch everything
+    } catch (Exception e) {
+      //silently return null
+    }
+    return realmString;
+  }
+
+  /*
+   * For a Service Host Principal specification, map the host's domain
+   * to kerberos realm, as specified by krb5.conf [domain_realm] mappings.
+   * Unfortunately the mapping routines are private to the security.krb5
+   * package, so have to construct a PrincipalName instance to derive the realm.
+   *
+   * Many things can go wrong with Kerberos configuration, and this is not
+   * the place to be throwing exceptions to help debug them.  Nor do we choose
+   * to make potentially voluminous logs on every call to a communications API.
+   * So we simply swallow all exceptions from the underlying libraries and
+   * return null if we can't get a good value for the realmString.
+   *
+   * @param shortprinc A service principal name with host fqdn as instance, e.g.
+   *     "HTTP/myhost.mydomain"
+   * @return String value of Kerberos realm, mapped from host fqdn
+   *     May be default realm, or may be null.
+   */
+  public static String getDomainRealm(String shortprinc) {
+    Class<?> classRef;
+    Object principalName; //of type sun.security.krb5.PrincipalName or IBM equiv
+    String realmString = null;
+    try {
+      if (IBM_JAVA) {
+        classRef = Class.forName("com.ibm.security.krb5.PrincipalName");
+      } else {
+        classRef = Class.forName("sun.security.krb5.PrincipalName");
+      }
+      int tKrbNtSrvHst = classRef.getField("KRB_NT_SRV_HST").getInt(null);
+      principalName = classRef.getConstructor(String.class, int.class).
+          newInstance(shortprinc, tKrbNtSrvHst);
+      realmString = (String) classRef.getMethod("getRealmString", new Class[0]).
+          invoke(principalName, new Object[0]);
+    } catch (RuntimeException rte) {
+      //silently catch everything
+    } catch (Exception e) {
+      //silently return default realm (which may itself be null)
+    }
+    if (null == realmString || realmString.equals("")) {
+      return getDefaultRealmProtected();
+    } else {
+      return realmString;
+    }
+  }
+
+  /* Return fqdn of the current host */
+  static String getLocalHostName() throws UnknownHostException {
+    return InetAddress.getLocalHost().getCanonicalHostName();
+  }
+  
+  /**
+   * Create Kerberos principal for a given service and hostname,
+   * inferring realm from the fqdn of the hostname. It converts
+   * hostname to lower case. If hostname is null or "0.0.0.0", it uses
+   * dynamically looked-up fqdn of the current host instead.
+   * If domain_realm mappings are inadequately specified, it will
+   * use default_realm, per usual Kerberos behavior.
+   * If default_realm also gives a null value, then a principal
+   * without realm will be returned, which by Kerberos definitions is
+   * just another way to specify default realm.
+   *
+   * @param service
+   *          Service for which you want to generate the principal.
+   * @param hostname
+   *          Fully-qualified domain name.
+   * @return Converted Kerberos principal name.
+   * @throws UnknownHostException
+   *           If no IP address for the local host could be found.
+   */
+  public static final String getServicePrincipal(String service,
+      String hostname)
+      throws UnknownHostException {
+    String fqdn = hostname;
+    String shortprinc = null;
+    String realmString = null;
+    if (null == fqdn || fqdn.equals("") || fqdn.equals("0.0.0.0")) {
+      fqdn = getLocalHostName();
+    }
+    // convert hostname to lowercase as kerberos does not work with hostnames
+    // with uppercase characters.
+    fqdn = fqdn.toLowerCase(Locale.US);
+    shortprinc = service + "/" + fqdn;
+    // Obtain the realm name inferred from the domain of the host
+    realmString = getDomainRealm(shortprinc);
+    if (null == realmString || realmString.equals("")) {
+      return shortprinc;
+    } else {
+      return shortprinc + "@" + realmString;
+    }
+  }
+
+  /**
+   * Get all the unique principals present in the keytabfile.
+   * 
+   * @param keytabFileName 
+   *          Name of the keytab file to be read.
+   * @return list of unique principals in the keytab.
+   * @throws IOException 
+   *          If keytab entries cannot be read from the file.
+   */
+  static final String[] getPrincipalNames(String keytabFileName) throws IOException {
+    Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
+    Set<String> principals = new HashSet<String>();
+    List<PrincipalName> entries = keytab.getPrincipals();
+    for (PrincipalName entry : entries) {
+      principals.add(entry.getName().replace("\\", "/"));
+    }
+    return principals.toArray(new String[0]);
+  }
+
+  /**
+   * Get all the unique principals from keytabfile which matches a pattern.
+   * 
+   * @param keytab Name of the keytab file to be read.
+   * @param pattern pattern to be matched.
+   * @return list of unique principals which matches the pattern.
+   * @throws IOException if cannot get the principal name
+   */
+  public static final String[] getPrincipalNames(String keytab,
+      Pattern pattern) throws IOException {
+    String[] principals = getPrincipalNames(keytab);
+    if (principals.length != 0) {
+      List<String> matchingPrincipals = new ArrayList<String>();
+      for (String principal : principals) {
+        if (pattern.matcher(principal).matches()) {
+          matchingPrincipals.add(principal);
+        }
+      }
+      principals = matchingPrincipals.toArray(new String[0]);
+    }
+    return principals;
+  }
+
+  /**
+   * Check if the subject contains Kerberos keytab related objects.
+   * The Kerberos keytab object attached to the subject changed
+   * from KerberosKey (JDK 7) to KeyTab (JDK 8).
+   *
+   * @param subject subject to be checked
+   * @return true if the subject contains Kerberos keytab
+   */
+  public static boolean hasKerberosKeyTab(Subject subject) {
+    return !subject.getPrivateCredentials(KeyTab.class).isEmpty();
+  }
+
+  /**
+   * Check if the subject contains a Kerberos ticket.
+   *
+   * @param subject subject to be checked
+   * @return true if the subject contains Kerberos ticket
+   */
+  public static boolean hasKerberosTicket(Subject subject) {
+    return !subject.getPrivateCredentials(KerberosTicket.class).isEmpty();
+  }
+}

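A sketch of the two helpers most callers need (the hostname and keytab path
are placeholders; both calls can throw, which is elided here):

    // Infer something like "HTTP/myhost.example.com@EXAMPLE.COM" from the
    // krb5.conf domain_realm/default_realm mappings; the hostname is
    // lower-cased first, and null/"0.0.0.0" falls back to the local fqdn.
    String spn = KerberosUtil.getServicePrincipal("HTTP", "MyHost.Example.COM");

    // List the keytab principals matching a pattern.
    String[] https = KerberosUtil.getPrincipalNames("/etc/has/http.keytab",
        java.util.regex.Pattern.compile("HTTP/.*"));

If neither mapping yields a realm, getServicePrincipal() returns the principal
without one, which Kerberos treats as the default realm.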

[09/10] directory-kerby git commit: Add the HAS project to Kerby.

Posted by pl...@apache.org.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAdminClient.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAdminClient.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAdminClient.java
new file mode 100644
index 0000000..3f5e3fa
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAdminClient.java
@@ -0,0 +1,480 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.client;
+
+import com.sun.jersey.api.client.Client;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.api.client.config.ClientConfig;
+import com.sun.jersey.api.client.config.DefaultClientConfig;
+import com.sun.jersey.client.urlconnection.HTTPSProperties;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.hadoop.has.common.HasAdmin;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.ssl.SSLFactory;
+import org.apache.hadoop.has.common.util.URLConnectionFactory;
+import org.apache.kerby.kerberos.kerb.common.KrbUtil;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.glassfish.jersey.SslConfigurator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLSession;
+import javax.ws.rs.core.MultivaluedMap;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * An admin client API for applications to interact with the KDC.
+ */
+public class HasAdminClient implements HasAdmin {
+
+    public static final Logger LOG = LoggerFactory.getLogger(HasAdminClient.class);
+
+    private HasConfig hasConfig;
+    private File confDir;
+
+    public HasAdminClient(HasConfig hasConfig) {
+        this.hasConfig = hasConfig;
+    }
+
+    public HasAdminClient(HasConfig hasConfig, File confDir) {
+        this.hasConfig = hasConfig;
+        this.confDir = confDir;
+    }
+
+    public File getConfDir() {
+        return confDir;
+    }
+
+    public HasConfig getHasConfig() {
+        return hasConfig;
+    }
+
+    protected HttpURLConnection getHttpsConnection(URL url, boolean isSpnego) throws Exception {
+        HasConfig conf = new HasConfig();
+
+        conf.setString(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
+        String sslClientConf = hasConfig.getSslClientConf();
+        conf.setString(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConf);
+        conf.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, false);
+
+        URLConnectionFactory connectionFactory = URLConnectionFactory
+                .newDefaultURLConnectionFactory(conf);
+        return (HttpURLConnection) connectionFactory.openConnection(url, isSpnego, hasConfig);
+    }
+
+    private WebResource getWebResource(String restName) {
+        Client client;
+        String server = null;
+        if ((hasConfig.getHttpsPort() != null) && (hasConfig.getHttpsHost() != null)) {
+            server = "https://" + hasConfig.getHttpsHost() + ":" + hasConfig.getHttpsPort()
+                    + "/has/v1/" + restName;
+            LOG.info("Admin request url: " + server);
+            HasConfig conf = new HasConfig();
+            try {
+                conf.addIniConfig(new File(hasConfig.getSslClientConf()));
+            } catch (IOException e) {
+                throw new RuntimeException("Errors occurred when adding ssl conf. "
+                    + e.getMessage());
+            }
+            SslConfigurator sslConfigurator = SslConfigurator.newInstance()
+                    .trustStoreFile(conf.getString("ssl.client.truststore.location"))
+                    .trustStorePassword(conf.getString("ssl.client.truststore.password"));
+            sslConfigurator.securityProtocol("SSL");
+            SSLContext sslContext = sslConfigurator.createSSLContext();
+            ClientConfig clientConfig = new DefaultClientConfig();
+            clientConfig.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES,
+                    new HTTPSProperties(new HostnameVerifier() {
+                        @Override
+                        public boolean verify(String s, SSLSession sslSession) {
+                            // Allow all hostnames, consistent with the
+                            // ALLOW_ALL verifier configured above; returning
+                            // false would reject every HTTPS connection.
+                            return true;
+                        }
+                    }, sslContext));
+            client = Client.create(clientConfig);
+        } else {
+            client = Client.create();
+        }
+        if (server == null) {
+            throw new RuntimeException("Please set the https address and port.");
+        }
+        return client.resource(server);
+    }
+
+    /**
+     * Converts a principals JSON string to a list.
+     *
+     * @param princs principals as a JSON array string, e.g.
+     *               "["HTTP\/host1@HADOOP.COM","HTTP\/host2@HADOOP.COM"]"
+     * @return the principal list.
+     */
+    private List<String> getPrincsList(String princs) {
+        List<String> principalLists = new ArrayList<>();
+        try {
+            JSONArray principals = new JSONArray(princs);
+            for (int i = 0; i < principals.length(); i++) {
+                principalLists.add("\t" + principals.getString(i));
+            }
+        } catch (Exception e) {
+            System.err.println("Errors occurred when getting the principals."
+                + e.getMessage());
+        }
+        return principalLists;
+    }
+
+    public void requestCreatePrincipals(String hostRoles) throws HasException {
+        WebResource webResource = getWebResource("admin/createprincipals");
+        String response = webResource.entity(hostRoles.getBytes()).put(String.class);
+        try {
+            System.out.println(new JSONObject(response).getString("msg"));
+        } catch (JSONException e) {
+            throw new HasException(e);
+        }
+    }
+
+    @Override
+    public void addPrincipal(String principal) throws HasException {
+        WebResource webResource = getWebResource("admin/addprincipal");
+
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", principal);
+        String response = webResource.queryParams(params).post(String.class);
+        try {
+            System.out.println(new JSONObject(response).getString("msg"));
+        } catch (JSONException e) {
+            System.err.println("Errors occurred when getting the message from response."
+                + e.getMessage());
+        }
+    }
+
+    @Override
+    public File getKeytabByHostAndRole(String host, String role) throws HasException {
+        WebResource webResource = getWebResource("admin/exportkeytabs");
+
+        String keytabName = host + ".zip";
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("host", host);
+        if (!role.equals("")) {
+            params.add("role", role);
+            keytabName = role + "-" + host + ".keytab";
+        }
+        ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
+        if (response.getStatus() != 200) {
+            System.err.println("Error : connection denied.");
+            return null;
+        }
+        FileOutputStream fos;
+        try {
+            fos = new FileOutputStream(new File(keytabName));
+        } catch (FileNotFoundException e) {
+            System.err.println(e.getMessage());
+            return null;
+        }
+        InputStream in = response.getEntityInputStream();
+        byte[] buffer = new byte[4 * 1024];
+        int read;
+        try {
+            while ((read = in.read(buffer)) > 0) {
+                fos.write(buffer, 0, read);
+            }
+            fos.close();
+            in.close();
+        } catch (IOException e) {
+            System.err.println("Errors occurred when reading the buffer to write keytab file."
+                + e.getMessage());
+        }
+        System.out.println("Accept keytab file \"" + keytabName + "\" from server.");
+        return new File(keytabName);
+    }
+
+    @Override
+    public void addPrincipal(String principal, String password) throws HasException {
+        WebResource webResource = getWebResource("admin/addprincipal");
+
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", principal);
+        params.add("password", password);
+        String response = webResource.queryParams(params).post(String.class);
+        try {
+            System.out.println(new JSONObject(response).getString("msg"));
+        } catch (JSONException e) {
+            System.err.println("Errors occurred when getting the message from response."
+                + e.getMessage());
+        }
+    }
+
+    @Override
+    public void deletePrincipal(String principal) throws HasException {
+        WebResource webResource = getWebResource("admin/deleteprincipal");
+
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", principal);
+        String response = webResource.queryParams(params).delete(String.class);
+        try {
+            System.out.println(new JSONObject(response).getString("msg"));
+        } catch (JSONException e) {
+            System.err.println("Errors occurred when getting the message from response."
+                + e.getMessage());
+        }
+    }
+
+    @Override
+    public void renamePrincipal(String oldPrincipal, String newPrincipal) throws HasException {
+        WebResource webResource = getWebResource("admin/renameprincipal");
+
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("oldprincipal", oldPrincipal);
+        params.add("newprincipal", newPrincipal);
+        String response = webResource.queryParams(params).post(String.class);
+        try {
+            System.out.println(new JSONObject(response).getString("msg"));
+        } catch (JSONException e) {
+            System.err.println(e.getMessage());
+        }
+    }
+
+    @Override
+    public List<String> getPrincipals() throws HasException {
+        WebResource webResource = getWebResource("admin/getprincipals");
+
+        String response = webResource.get(String.class);
+        String princs = null;
+        try {
+            princs = new JSONObject(response).getString("msg");
+        } catch (JSONException e) {
+            System.err.println("Errors occurred when getting the message from response."
+                + e.getMessage());
+        }
+        return getPrincsList(princs);
+    }
+
+    @Override
+    public List<String> getPrincipals(String exp) throws HasException {
+        WebResource webResource = getWebResource("admin/getprincipals");
+
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("exp", exp);
+        String response = webResource.queryParams(params).get(String.class);
+        return getPrincsList(response);
+    }
+
+    /**
+     * Creates an HTTP connection to the HAS server.
+     *
+     * @param url the URL to connect to.
+     * @param method the HTTP method to use.
+     * @return the connection.
+     * @throws IOException if the connection cannot be opened.
+     */
+    protected HttpURLConnection createConnection(URL url, String method) throws IOException {
+        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+        conn.setRequestMethod(method);
+        if (method.equals("POST") || method.equals("PUT")) {
+            conn.setDoOutput(true);
+        }
+        return conn;
+    }
+
+    @Override
+    public String addPrincByRole(String host, String role) throws HasException {
+        //TODO
+        return null;
+    }
+
+    @Override
+    public String getHadminPrincipal() {
+        return KrbUtil.makeKadminPrincipal(hasConfig.getRealm()).getName();
+    }
+
+    /**
+     * Get the number of principals.
+     */
+    @Override
+    public int size() throws HasException {
+        return this.getPrincipals().size();
+    }
+
+    public String getKrb5conf() {
+        WebResource webResource = getWebResource("getkrb5conf");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            return response.getEntity(String.class);
+        }
+        return null;
+    }
+
+    public String getHasconf() {
+        WebResource webResource = getWebResource("gethasconf");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            return response.getEntity(String.class);
+        }
+        return null;
+    }
+
+    public void setPlugin(String plugin) {
+        WebResource webResource = getWebResource("conf/setplugin");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("plugin", plugin);
+        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            System.out.println(response.getEntity(String.class));
+        } else if (response.getStatus() == 400) {
+            System.err.println(response.getEntity(String.class));
+        }
+    }
+
+    public void configKdc(String port, String realm, String host) {
+        WebResource webResource = getWebResource("conf/configkdc");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("port", port);
+        params.add("realm", realm);
+        params.add("host", host);
+        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            System.out.println(response.getEntity(String.class));
+        } else if (response.getStatus() == 400) {
+            System.err.println(response.getEntity(String.class));
+        }
+    }
+
+    public void configKdcBackend(String backendType, String dir, String url, String user,
+                                 String password) {
+        WebResource webResource = getWebResource("conf/configkdcbackend");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("backendType", backendType);
+        if (backendType.equals("json")) {
+            params.add("dir", dir);
+        } else if (backendType.equals("mysql")) {
+            params.add("url", url);
+            params.add("user", user);
+            params.add("password", password);
+        }
+        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            System.out.println(response.getEntity(String.class));
+        } else if (response.getStatus() == 400) {
+            System.err.println(response.getEntity(String.class));
+        }
+    }
+
+    public void startKdc() {
+        WebResource webResource = getWebResource("kdcstart");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        try {
+            JSONObject result = new JSONObject(response.getEntity(String.class));
+            if (result.getString("result").equals("success")) {
+                System.out.println(result.getString("msg"));
+            } else {
+                System.err.println(result.getString("msg"));
+            }
+        } catch (JSONException e) {
+            System.err.println(e.getMessage());
+        }
+    }
+
+    public InputStream initKdc() {
+        WebResource webResource = getWebResource("kdcinit");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            return response.getEntityInputStream();
+        }
+        return null;
+    }
+
+    public String getHostRoles() {
+        WebResource webResource = getWebResource("hostroles");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            return response.getEntity(String.class);
+        }
+        return null;
+    }
+
+    public void setEnableOfConf(String isEnable) throws HasException {
+        WebResource webResource = getWebResource("admin/setconf");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("isEnable", isEnable);
+        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
+        if (response.getStatus() == 200) {
+            System.out.println(response.getEntity(String.class));
+        } else {
+            System.err.println(response.getEntity(String.class));
+        }
+    }
+
+    @Override
+    public void exportKeytab(File keytab, String principal) throws HasException {
+        WebResource webResource = getWebResource("admin/exportkeytab");
+
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("principal", principal);
+        ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
+        FileOutputStream fos;
+        try {
+            fos = new FileOutputStream(keytab);
+        } catch (FileNotFoundException e) {
+            throw new HasException("The keytab file: " + keytab + "not exist. " + e);
+        }
+        InputStream in = response.getEntityInputStream();
+        byte[] buffer = new byte[4 * 1024];
+        int read;
+        try {
+            while ((read = in.read(buffer)) > 0) {
+                fos.write(buffer, 0, read);
+            }
+            fos.close();
+            in.close();
+        } catch (IOException e) {
+            System.err.println("Errors occurred when writing the buffer to keytab file." + e.toString());
+        }
+        System.out.println("Accept keytab file \"" + keytab.getName() + "\" from server successfully.");
+    }
+
+    @Override
+    public void exportKeytab(File keytabFile, List<String> principals) throws HasException {
+        WebResource webResource = getWebResource("admin/exportkeytab");
+        for (String principal: principals) {
+            MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+            params.add("principal", principal);
+            ClientResponse response = webResource.queryParams(params).get(ClientResponse.class);
+            FileOutputStream fos;
+            try {
+                fos = new FileOutputStream(keytabFile);
+            } catch (FileNotFoundException e) {
+                throw new HasException("The keytab file: " + keytabFile.getName() + "not exist. " + e);
+            }
+            InputStream in = response.getEntityInputStream();
+            byte[] buffer = new byte[4 * 1024];
+            int read;
+            try {
+                while ((read = in.read(buffer)) > 0) {
+                    fos.write(buffer, 0, read);
+                }
+                fos.close();
+                in.close();
+            } catch (IOException e) {
+                LOG.error("Errors occurred when writing the buffer to keytab file." + e.toString());
+            }
+        }
+        System.out.println("Accept keytab file \"" + keytabFile.getName() + "\" from server successfully.");
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAuthAdminClient.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAuthAdminClient.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAuthAdminClient.java
new file mode 100644
index 0000000..d7e3f5a
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasAuthAdminClient.java
@@ -0,0 +1,553 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.client;
+
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.ProtocolException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+
+public class HasAuthAdminClient extends HasAdminClient {
+    public static final Logger LOG = LoggerFactory.getLogger(HasAuthAdminClient.class);
+
+    /**
+     * Create an instance of the HasAuthAdminClient.
+     *
+     * @param hasConfig the has config
+     */
+    public HasAuthAdminClient(HasConfig hasConfig) {
+        super(hasConfig);
+    }
+
+    /**
+     * Create an authenticated connection to the Has server.
+     * <p>
+     * It uses Hadoop-auth client authentication which by default supports
+     * Kerberos HTTP SPNEGO, Pseudo/Simple and anonymous.
+     *
+     * @param url    the URL to open a HTTP connection to.
+     * @param method the HTTP method for the HTTP connection.
+     * @return an authenticated connection to the has server.
+     * @throws IOException if an IO error occurred.
+     */
+    @Override
+    protected HttpURLConnection createConnection(URL url, String method) {
+        HttpURLConnection conn = null;
+        if ((getHasConfig().getHttpsPort() != null) && (getHasConfig().getHttpsHost() != null)) {
+            try {
+                conn = super.getHttpsConnection(url, true);
+            } catch (Exception e) {
+                System.err.println(e.getMessage());
+            }
+        }
+        // Guard against a null connection so callers fail fast instead of
+        // hitting a NullPointerException below.
+        if (conn == null) {
+            throw new RuntimeException("Failed to create a connection, "
+                + "please set the https host and port in the client config.");
+        }
+        if (method.equals("POST") || method.equals("PUT")) {
+            conn.setDoOutput(true);
+        }
+        return conn;
+    }
+
+    private String getBaseURL() {
+        String url = null;
+        if ((getHasConfig().getHttpsPort() != null) && (getHasConfig().getHttpsHost() != null)) {
+            url = "https://" + getHasConfig().getHttpsHost() + ":" + getHasConfig().getHttpsPort()
+                + "/has/v1/admin/";
+        }
+        if (url == null) {
+            throw new RuntimeException("Please set the https address and port.");
+        }
+        return url;
+    }
+
+    public void addPrincipal(String principal) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "addprincipal?principal=" + principal);
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "POST");
+
+        httpConn.setRequestProperty("Content-Type",
+            "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("POST");
+        } catch (ProtocolException e) {
+            LOG.error("Fail to add principal. " + e);
+            throw new HasException(e);
+        }
+        try {
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() == 200) {
+                System.out.println(getResponse(httpConn));
+            } else {
+                throw new HasException("Fail to add principal \"" + principal + "\".");
+            }
+        } catch (Exception e) {
+            LOG.error("Fail to add principal. " + e);
+            throw new HasException(e);
+        }
+    }
+
+    public void setEnableOfConf(String isEnable) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "setconf?isEnable=" + isEnable);
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "PUT");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("PUT");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        try {
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+            InputStream inputStream = httpConn.getResponseCode() == 200
+                    ? httpConn.getInputStream() : httpConn.getErrorStream();
+            BufferedReader reader = new BufferedReader(
+                    new InputStreamReader(inputStream));
+            String s;
+            StringBuilder result = new StringBuilder();
+            while ((s = reader.readLine()) != null) {
+                result.append(s);
+            }
+            if (httpConn.getResponseCode() == 200) {
+                System.out.println(result);
+            } else {
+                System.err.println(result);
+            }
+        } catch (Exception e) {
+            LOG.error("Fail to connect to server. " + e);
+            throw new HasException(e);
+        }
+    }
+
+    /**
+     * Convert a principals JSON string to a List.
+     *
+     * @param princs a principals JSON string, such as
+     *               "["HTTP\/host1@HADOOP.COM","HTTP\/host2@HADOOP.COM"]"
+     * @return the list of principals.
+     */
+    private List<String> getPrincsList(String princs) {
+        List<String> principalLists = new ArrayList<>();
+        try {
+            JSONArray principals = new JSONArray(princs);
+            for (int i = 0; i < principals.length(); i++) {
+                principalLists.add("\t" + principals.getString(i));
+            }
+        } catch (Exception e) {
+            System.err.println(e.getMessage());
+        }
+        return principalLists;
+    }
+
+    @Override
+    public void requestCreatePrincipals(String hostRoles) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "createprincipals");
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "POST");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("PUT");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        httpConn.setDoOutput(true);
+        httpConn.setDoInput(true);
+        try {
+            httpConn.connect();
+            OutputStream out = httpConn.getOutputStream();
+            out.write(hostRoles.getBytes());
+            out.flush();
+            out.close();
+            if (httpConn.getResponseCode() == 200) {
+                System.out.println(getResponse(httpConn));
+            } else {
+                throw new HasException("Connection deined.");
+            }
+        } catch (Exception e) {
+            throw new HasException(e);
+        }
+    }
+
+    @Override
+    public File getKeytabByHostAndRole(String host, String role) throws HasException {
+        String keytabName = host + ".zip";
+        HttpURLConnection httpConn;
+        String request = getBaseURL() + "exportkeytabs?host=" + host;
+        if (!role.equals("")) {
+            request = request + "&role=" + role;
+            keytabName = role + "-" + host + ".keytab";
+        }
+
+        URL url;
+        try {
+            url = new URL(request);
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "GET");
+
+        httpConn.setRequestProperty("Content-Type",
+            "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("GET");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        httpConn.setDoOutput(true);
+        httpConn.setDoInput(true);
+        try {
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() != 200) {
+                System.err.println("Error : connection denied.");
+                return null;
+            }
+            FileOutputStream fos = new FileOutputStream(new File(keytabName));
+            InputStream in = httpConn.getInputStream();
+            byte[] buffer = new byte[4 * 1024];
+            int read;
+            while ((read = in.read(buffer)) > 0) {
+                fos.write(buffer, 0, read);
+            }
+            fos.close();
+            in.close();
+        } catch (IOException e) {
+            throw new HasException(e);
+        }
+        System.out.println("Accept keytab file \"" + keytabName + "\" from server.");
+
+        return new File(keytabName);
+    }
+
+    @Override
+    public void exportKeytab(File keytab, String principal) throws HasException {
+        URL url = null;
+        try {
+            url = new URL(getBaseURL() + "exportkeytab?principal=" + principal);
+        } catch (MalformedURLException e) {
+            LOG.error("Fail to get url. " + e);
+            throw new HasException("Fail to get url.", e);
+        }
+
+        HttpURLConnection httpConn = createConnection(url, "GET");
+        httpConn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("GET");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        httpConn.setDoOutput(true);
+        httpConn.setDoInput(true);
+        try {
+            httpConn.connect();
+            if (httpConn.getResponseCode() != 200) {
+                throw new HasException("Error: connection denied.");
+            }
+            FileOutputStream fos = new FileOutputStream(keytab);
+            InputStream in = httpConn.getInputStream();
+            byte[] buffer = new byte[3 * 1024];
+            int read;
+            while ((read = in.read(buffer)) > 0) {
+                fos.write(buffer, 0, read);
+            }
+            fos.close();
+            in.close();
+        } catch (IOException e) {
+            throw new HasException(e);
+        }
+        System.out.println("Receive keytab file \"" + keytab.getName() + "\" from server successfully.");
+    }
+
+    @Override
+    public void exportKeytab(File keytabFile, List<String> principals) throws HasException {
+        HttpURLConnection httpConn;
+        for (String principal: principals) {
+            String request = getBaseURL() + "exportkeytab?principal=" + principal;
+            URL url;
+            try {
+                url = new URL(request);
+            } catch (MalformedURLException e) {
+                throw new HasException(e);
+            }
+            httpConn = createConnection(url, "GET");
+            httpConn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
+            try {
+                httpConn.setRequestMethod("GET");
+            } catch (ProtocolException e) {
+                throw new HasException(e);
+            }
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            try {
+                httpConn.connect();
+                if (httpConn.getResponseCode() != 200) {
+                    System.err.println("Error: connection denied.");
+                }
+                FileOutputStream fos = new FileOutputStream(keytabFile);
+                InputStream in = httpConn.getInputStream();
+                byte[] buffer = new byte[4 * 1024];
+                int read;
+                while ((read = in.read(buffer)) > 0) {
+                    fos.write(buffer, 0, read);
+                }
+                fos.close();
+                in.close();
+            } catch (IOException e) {
+                throw new HasException(e);
+            }
+        }
+        System.out.println("Accept keytab file \"" + keytabFile.getName() + "\" from server.");
+    }
+
+    @Override
+    public void addPrincipal(String principal, String password) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url = null;
+        try {
+            url = new URL(getBaseURL() + "addprincipal?principal=" + principal
+                            + "&password=" + password);
+        } catch (MalformedURLException e) {
+            throw new HasException("Fail to get url.", e);
+        }
+
+        httpConn = createConnection(url, "POST");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("POST");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        try {
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() == 200) {
+                System.out.println(getResponse(httpConn));
+            } else {
+                throw new HasException("Fail to add principal \"" + principal + "\".");
+            }
+        } catch (Exception e) {
+            throw new HasException(e);
+        }
+    }
+
+    @Override
+    public void deletePrincipal(String principal) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "deleteprincipal?principal=" + principal);
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "DELETE");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("DELETE");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        try {
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() == 200) {
+                System.out.println(getResponse(httpConn));
+            } else {
+                throw new HasException("Connection deined.");
+            }
+        } catch (Exception e) {
+            throw new HasException(e);
+        }
+    }
+
+    @Override
+    public void renamePrincipal(String oldPrincipal, String newPrincipal) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "renameprincipal?oldprincipal=" + oldPrincipal
+                            + "&newprincipal=" + newPrincipal);
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "POST");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("POST");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        try {
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() == 200) {
+                System.out.println(getResponse(httpConn));
+            } else {
+                throw new HasException("Connection to renameprincipal deined.");
+            }
+        } catch (Exception e) {
+            throw new HasException(e);
+        }
+    }
+
+    @Override
+    public List<String> getPrincipals() throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "getprincipals");
+        } catch (MalformedURLException e) {
+            System.err.println(e.getMessage());
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "GET");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("GET");
+        } catch (ProtocolException e) {
+            throw new HasException(e);
+        }
+        String response;
+        try {
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() == 200) {
+                response = getResponse(httpConn);
+            } else {
+                throw new HasException("Connection to getprincipals deined.");
+            }
+        } catch (Exception e) {
+            LOG.error("Fail to get principals." + e);
+            throw new HasException("Fail to get principals.", e);
+        }
+        return getPrincsList(response);
+    }
+
+    @Override
+    public List<String> getPrincipals(String exp) throws HasException {
+        HttpURLConnection httpConn;
+
+        URL url;
+        try {
+            url = new URL(getBaseURL() + "getprincipals?exp=" + exp);
+        } catch (MalformedURLException e) {
+            throw new HasException(e);
+        }
+
+        httpConn = createConnection(url, "GET");
+
+        httpConn.setRequestProperty("Content-Type",
+                "application/json; charset=UTF-8");
+        try {
+            httpConn.setRequestMethod("GET");
+        } catch (ProtocolException e) {
+            LOG.error("Fail to get the principals with expression. " + e);
+            throw new HasException("Fail to get the principals with expression.", e);
+        }
+        String response;
+        try {
+            httpConn.setDoOutput(true);
+            httpConn.setDoInput(true);
+            httpConn.connect();
+
+            if (httpConn.getResponseCode() == 200) {
+                response = getResponse(httpConn);
+            } else {
+                throw new HasException("Connection to getprincipals deined.");
+            }
+        } catch (Exception e) {
+            throw new HasException(e);
+        }
+        return getPrincsList(response);
+    }
+
+    private String getResponse(HttpURLConnection httpConn) throws Exception {
+        StringBuilder data = new StringBuilder();
+        BufferedReader br = new BufferedReader(new InputStreamReader(httpConn.getInputStream()));
+        String s;
+        while ((s = br.readLine()) != null) {
+            data.append(s);
+        }
+        return new JSONObject(data.toString()).getString("msg");
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClient.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClient.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClient.java
new file mode 100755
index 0000000..5f612d3
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClient.java
@@ -0,0 +1,677 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.client;
+
+import com.sun.jersey.api.client.Client;
+import com.sun.jersey.api.client.ClientHandlerException;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.text.CharacterPredicates;
+import org.apache.commons.text.RandomStringGenerator;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasConfigKey;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.ssl.SSLFactory;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.common.util.URLConnectionFactory;
+import org.apache.kerby.kerberos.kerb.KrbCodec;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.KrbRuntime;
+import org.apache.kerby.kerberos.kerb.crypto.EncryptionHandler;
+import org.apache.kerby.kerberos.kerb.provider.TokenEncoder;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptedData;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.KeyUsage;
+import org.apache.kerby.kerberos.kerb.type.base.KrbError;
+import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;
+import org.apache.kerby.kerberos.kerb.type.base.KrbMessageType;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
+import org.apache.kerby.kerberos.kerb.type.kdc.EncAsRepPart;
+import org.apache.kerby.kerberos.kerb.type.kdc.EncKdcRepPart;
+import org.apache.kerby.kerberos.kerb.type.kdc.KdcRep;
+import org.apache.kerby.kerberos.kerb.type.ticket.TgtTicket;
+import org.apache.kerby.util.IOUtil;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.ProtocolException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.nio.ByteBuffer;
+import java.security.GeneralSecurityException;
+import java.security.KeyStore;
+import java.security.PublicKey;
+import java.security.cert.CertificateException;
+import java.security.cert.CertificateFactory;
+import java.security.cert.X509Certificate;
+import java.util.Date;
+
+/**
+ * HAS client
+ */
+public class HasClient {
+
+    public static final Logger LOG = LoggerFactory.getLogger(HasClient.class);
+
+    public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf";
+    public static final String HAS_HTTP_PORT_DEFAULT = "9870";
+    public static final String HAS_CONFIG_DEFAULT = "/etc/has/has-client.conf";
+    public static final String CA_ROOT_DEFAULT = "/etc/has/ca-root.pem";
+
+    private String hadoopSecurityHas = null;
+    private String type;
+    private File clientConfigFolder;
+
+
+    public HasClient() { }
+
+    /**
+     * Create an instance of the HasClient.
+     *
+     * @param hadoopSecurityHas the has config
+     */
+    public HasClient(String hadoopSecurityHas) {
+        this.hadoopSecurityHas = hadoopSecurityHas;
+    }
+
+
+    public TgtTicket requestTgt() throws HasException {
+        HasConfig config;
+        if (hadoopSecurityHas == null) {
+            String hasClientConf = System.getenv("HAS_CLIENT_CONF");
+            if (hasClientConf == null) {
+                hasClientConf = HAS_CONFIG_DEFAULT;
+            }
+            LOG.debug("has-client conf path: " + hasClientConf);
+            File confFile = new File(hasClientConf);
+            if (!confFile.exists()) {
+                throw new HasException("The HAS client config file: " + hasClientConf
+                    + " does not exist.");
+            }
+            try {
+                config = HasUtil.getHasConfig(confFile);
+            } catch (HasException e) {
+                LOG.error("Failed to get has client config: " + e.getMessage());
+                throw new HasException("Failed to get has client config: " + e);
+            }
+        } else {
+            config = new HasConfig();
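+            // hadoopSecurityHas is assumed to be a semicolon-separated list of
+            // HAS server URLs, e.g.
+            // "https://host1:port/?auth_type=xxx;https://host2:port/?auth_type=xxx"
+            // (inferred from the parsing below); all URLs must share one port.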
+            String[] urls = hadoopSecurityHas.split(";");
+            String host = "";
+            int port = 0;
+            try {
+                for (String url : urls) {
+                    URI uri = new URI(url.trim());
+
+                    // parse host
+                    host = host + uri.getHost() + ",";
+
+                    // parse port
+                    if (port == 0) {
+                        port = uri.getPort();
+                    } else {
+                        if (port != uri.getPort()) {
+                            throw new HasException("Invalid port: not even.");
+                        }
+                    }
+
+                    // We will get the auth type from env first
+                    type = System.getenv("auth_type");
+                    // otherwise parse the auth type from the URL query
+                    if (type == null) {
+                        String[] strs = uri.getQuery().split("=");
+                        if (strs[0].equals("auth_type")) {
+                            type = strs[1];
+                        } else {
+                            LOG.warn("No auth type in conf.");
+                        }
+                    }
+                }
+                if (host.isEmpty() || port <= 0) {
+                    throw new HasException("Failed to parse the host and port from the config.");
+                } else {
+                    host = host.substring(0, host.length() - 1);
+                    config.setString(HasConfigKey.HTTPS_HOST, host);
+                    config.setInt(HasConfigKey.HTTPS_PORT, port);
+                    config.setString(HasConfigKey.AUTH_TYPE, type);
+                }
+            } catch (URISyntaxException e) {
+                LOG.error("Errors occurred when getting web url. " + e.getMessage());
+                throw new HasException(
+                    "Errors occurred when getting web url. " + e.getMessage());
+            }
+        }
+        if (config == null) {
+            throw new HasException("Failed to get HAS client config.");
+        }
+        clientConfigFolder = new File("/etc/has/" + config.getHttpsHost());
+        if (!clientConfigFolder.exists()) {
+            clientConfigFolder.mkdirs();
+        }
+
+        // get and set ssl-client/trustStore first
+        String sslClientConfPath = clientConfigFolder + "/ssl-client.conf";
+        loadSslClientConf(config, sslClientConfPath);
+        config.setString(HasConfigKey.SSL_CLIENT_CONF, sslClientConfPath);
+
+        createKrb5Conf(config);
+
+        HasClientPlugin plugin;
+        try {
+            plugin = getClientTokenPlugin(config);
+        } catch (HasException e) {
+            LOG.error("Failed to get client token plugin from config: " + e.getMessage());
+            throw new HasException(
+                "Failed to get client token plugin from config: " + e.getMessage());
+        }
+        AuthToken authToken;
+        try {
+            authToken = plugin.login(config);
+        } catch (HasLoginException e) {
+            LOG.error("Plugin login failed: " + e.getMessage());
+            throw new HasException(
+                "Plugin login failed: " + e.getMessage());
+        }
+        type = plugin.getLoginType();
+
+        LOG.info("The plugin type is: " + type);
+
+        return requestTgt(authToken, type, config);
+    }
+
+    private void createKrb5Conf(HasConfig config) throws HasException {
+        HasAdminClient hasAdminClient = new HasAdminClient(config);
+        File krb5Conf = new File(clientConfigFolder + "/krb5.conf");
+        if (!krb5Conf.exists()) {
+            String content = hasAdminClient.getKrb5conf();
+            if (content == null) {
+                LOG.error("Failed to get krb5.conf.");
+                throw new HasException("Failed to get krb5.conf.");
+            }
+            try {
+                PrintStream ps = new PrintStream(new FileOutputStream(krb5Conf));
+                ps.println(content);
+                ps.close();
+                LOG.info("krb5.conf saved in: " + krb5Conf.getAbsolutePath());
+            } catch (FileNotFoundException e) {
+                LOG.error(e.getMessage());
+                throw new HasException(e);
+            }
+        }
+        System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5Conf.getAbsolutePath());
+    }
+
+
+    private HasClientPlugin getClientTokenPlugin(HasConfig config) throws HasException {
+        String pluginName = config.getPluginName();
+        LOG.info("The plugin name getting from config is: " + pluginName);
+        HasClientPlugin clientPlugin;
+        if (pluginName != null) {
+            clientPlugin = HasClientPluginRegistry.createPlugin(pluginName);
+        } else {
+            throw new HasException("Please set the plugin name in has client conf");
+        }
+        if (clientPlugin == null) {
+            throw new HasException("Failed to create client plugin: " + pluginName);
+        }
+        LOG.info("The plugin class is: " + clientPlugin);
+
+        return clientPlugin;
+    }
+
+    /**
+     * Request a TGT with the user token, plugin type and HAS config.
+     *
+     * @param authToken the user auth token
+     * @param type      the login (plugin) type
+     * @param config    the HAS client config
+     * @return TGT
+     * @throws HasException e
+     */
+    public TgtTicket requestTgt(AuthToken authToken, String type, HasConfig config)
+        throws HasException {
+        TokenEncoder tokenEncoder = KrbRuntime.getTokenProvider("JWT").createTokenEncoder();
+
+        String tokenString;
+        try {
+            tokenString = tokenEncoder.encodeAsString(authToken);
+        } catch (KrbException e) {
+            LOG.debug("Failed to decode the auth token.");
+            throw new HasException("Failed to decode the auth token." + e.getMessage());
+        }
+
+        JSONObject json = null;
+        int responseStatus = 0;
+        boolean success = false;
+        if ((config.getHttpsPort() != null) && (config.getHttpsHost() != null)) {
+            String sslClientConfPath = clientConfigFolder + "/ssl-client.conf";
+            config.setString(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
+            config.setString(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConfPath);
+            config.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, false);
+
+            URLConnectionFactory connectionFactory = URLConnectionFactory
+                .newDefaultURLConnectionFactory(config);
+
+            URL url;
+            String[] hosts = config.getHttpsHost().split(",");
+            for (String host : hosts) {
+                try {
+                    url = new URL("https://" + host.trim() + ":" + config.getHttpsPort()
+                        + "/has/v1?type=" + type + "&authToken=" + tokenString);
+                } catch (MalformedURLException e) {
+                    LOG.warn("Failed to get url. " + e.toString());
+                    continue;
+                }
+                HttpURLConnection conn;
+                try {
+                    conn = (HttpURLConnection) connectionFactory.openConnection(url);
+                } catch (IOException e) {
+                    LOG.warn("Failed to open connection. " + e.toString());
+                    continue;
+                }
+
+                conn.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
+                try {
+                    conn.setRequestMethod("PUT");
+                } catch (ProtocolException e) {
+                    LOG.warn("Failed to set request method. " + e.toString());
+                    continue;
+                }
+                conn.setDoOutput(true);
+                conn.setDoInput(true);
+
+                try {
+                    conn.connect();
+
+                    responseStatus = conn.getResponseCode();
+                    switch (responseStatus) {
+                        case 200:
+                        case 201:
+                            BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream()));
+                            StringBuilder sb = new StringBuilder();
+                            String line;
+                            while ((line = br.readLine()) != null) {
+                                sb.append(line).append("\n");
+                            }
+                            br.close();
+
+                            json = new JSONObject(sb.toString());
+                            break;
+                        default:
+                            LOG.warn("Unexpected response status: " + responseStatus);
+                    }
+
+                } catch (IOException | JSONException e) {
+                    LOG.warn("ERROR! " + e.toString());
+                    continue;
+                }
+
+                if (responseStatus == 200 || responseStatus == 201) {
+                    success = true;
+                    break;
+                }
+            }
+            if (!success) {
+                throw new HasException("Failed : HTTP error code : "
+                    + responseStatus);
+            }
+        } else {
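+            // No https host and port configured: fall back to plain HTTP.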
+            WebResource webResource;
+            Client client = Client.create();
+            String[] hosts = config.getHttpHost().split(",");
+            for (String host : hosts) {
+                webResource = client
+                    .resource("http://" + host.trim() + ":" + config.getHttpPort()
+                        + "/has/v1?type=" + type + "&authToken="
+                        + tokenString);
+                try {
+                    ClientResponse response = webResource.accept("application/json")
+                        .put(ClientResponse.class);
+
+                    if (response.getStatus() != 200) {
+                        LOG.warn("WARN! " + response.getEntity(String.class));
+                        responseStatus = response.getStatus();
+                        continue;
+                    }
+                    json = response.getEntity(JSONObject.class);
+                } catch (ClientHandlerException e) {
+                    LOG.warn("WARN! " + e.toString());
+                    continue;
+                }
+                success = true;
+                break;
+            }
+            if (!success) {
+                throw new HasException("Failed : HTTP error code : "
+                    + responseStatus);
+            }
+        }
+
+        LOG.debug("Return from Server .... \n");
+
+        try {
+            return handleResponse(json, (String) authToken.getAttributes().get("passPhrase"));
+        } catch (HasException e) {
+            LOG.debug("Failed to handle response when requesting tgt ticket in client."
+                + e.getMessage());
+            throw new HasException(e);
+        }
+    }
+
+    private File loadSslClientConf(HasConfig config, String sslClientConfPath) throws HasException {
+        File sslClientConf = new File(sslClientConfPath);
+        if (!sslClientConf.exists()) {
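+            // Bootstrap TLS trust: fetch the server certificate over plain HTTP,
+            // verify it against the local CA root, then build a truststore and
+            // an ssl-client.conf pointing at it.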
+            String httpHost = config.getHttpHost();
+            String httpPort = config.getHttpPort();
+            if (httpHost == null) {
+                LOG.info("Can't find the http host in config, the https host will be used.");
+                httpHost = config.getHttpsHost();
+            }
+            if (httpPort == null) {
+                LOG.info("Can't find the http port in config, the default http port will be used.");
+                httpPort = HAS_HTTP_PORT_DEFAULT;
+            }
+            X509Certificate certificate = getCertificate(httpHost, httpPort);
+            if (verifyCertificate(certificate)) {
+                String password = createTrustStore(config.getHttpsHost(), certificate);
+                createClientSSLConfig(password);
+            } else {
+                throw new HasException("The certificate from HAS server is invalid.");
+            }
+        }
+        return sslClientConf;
+    }
+
+    public KrbMessage getKrbMessage(JSONObject json) throws HasException {
+
+        LOG.debug("Starting to get the message from has server.");
+
+        try {
+            boolean success = json.getBoolean("success");
+            if (!success) {
+                throw new HasException("Failed: " + json.getString("krbMessage"));
+            }
+        } catch (JSONException e) {
+            LOG.debug("Failed to get message." + e);
+            throw new HasException("Failed to get message." + e);
+        }
+
+        String typeString;
+        try {
+            typeString = json.getString("type");
+        } catch (JSONException e) {
+            LOG.debug("Failed to get message." + e);
+            throw new HasException("Failed to get message." + e);
+        }
+
+        if (typeString != null && typeString.equals(type)) {
+            LOG.debug("The message type is " + type);
+            String krbMessageString;
+            try {
+                krbMessageString = json.getString("krbMessage");
+            } catch (JSONException e) {
+                LOG.debug("Failed to get the krbMessage. " + e);
+                throw new HasException("Failed to get the krbMessage.", e);
+            }
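+            // A line length of 0 disables chunking when encoding; it has no
+            // effect on decoding.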
+            Base64 base64 = new Base64(0);
+            byte[] krbMessage = base64.decode(krbMessageString);
+            ByteBuffer byteBuffer = ByteBuffer.wrap(krbMessage);
+            KrbMessage kdcRep;
+            try {
+                kdcRep = KrbCodec.decodeMessage(byteBuffer);
+            } catch (IOException e) {
+                throw new HasException("Krb decoding message failed", e);
+            }
+            return kdcRep;
+        } else {
+            throw new HasException("Can't get the right message from server.");
+        }
+    }
+
+    public TgtTicket handleResponse(JSONObject json, String passPhrase)
+        throws HasException {
+        KrbMessage kdcRep = getKrbMessage(json);
+
+        KrbMessageType messageType = kdcRep.getMsgType();
+        if (messageType == KrbMessageType.AS_REP) {
+            return processResponse((KdcRep) kdcRep, passPhrase);
+        } else if (messageType == KrbMessageType.KRB_ERROR) {
+            KrbError error = (KrbError) kdcRep;
+            LOG.error("KDC server response with message: "
+                + error.getErrorCode().getMessage());
+
+            throw new HasException(error.getEtext());
+        }
+        return null;
+    }
+
+    public TgtTicket processResponse(KdcRep kdcRep, String passPhrase)
+        throws HasException {
+
+        PrincipalName clientPrincipal = kdcRep.getCname();
+        String clientRealm = kdcRep.getCrealm();
+        clientPrincipal.setRealm(clientRealm);
+
+        // Get the client to decrypt the EncryptedData
+        EncryptionKey clientKey = null;
+        try {
+            clientKey = HasUtil.getClientKey(clientPrincipal.getName(),
+                passPhrase,
+                kdcRep.getEncryptedEncPart().getEType());
+        } catch (KrbException e) {
+            throw new HasException("Could not generate key. " + e.getMessage());
+        }
+
+        byte[] decryptedData = decryptWithClientKey(kdcRep.getEncryptedEncPart(),
+            KeyUsage.AS_REP_ENCPART, clientKey);
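+        // Some KDC implementations encode the AS-REP enc-part with the
+        // EncTGSRepPart application tag (26) instead of EncASRepPart (25);
+        // rewrite the tag so the data decodes as EncAsRepPart below.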
+        if ((decryptedData[0] & 0x1f) == 26) {
+            decryptedData[0] = (byte) (decryptedData[0] - 1);
+        }
+        EncKdcRepPart encKdcRepPart = new EncAsRepPart();
+        try {
+            encKdcRepPart.decode(decryptedData);
+        } catch (IOException e) {
+            throw new HasException("Failed to decode EncAsRepPart", e);
+        }
+        kdcRep.setEncPart(encKdcRepPart);
+
+//        if (getChosenNonce() != encKdcRepPart.getNonce()) {
+//            throw new KrbException("Nonce didn't match");
+//        }
+
+//        PrincipalName returnedServerPrincipal = encKdcRepPart.getSname();
+//        returnedServerPrincipal.setRealm(encKdcRepPart.getSrealm());
+//        PrincipalName requestedServerPrincipal = getServerPrincipal();
+//        if (requestedServerPrincipal.getRealm() == null) {
+//            requestedServerPrincipal.setRealm(getContext().getKrbSetting().getKdcRealm());
+//        }
+//        if (!returnedServerPrincipal.equals(requestedServerPrincipal)) {
+//            throw new KrbException(KrbErrorCode.KDC_ERR_SERVER_NOMATCH);
+//        }
+
+//        HostAddresses hostAddresses = getHostAddresses();
+//        if (hostAddresses != null) {
+//            List<HostAddress> requestHosts = hostAddresses.getElements();
+//            if (!requestHosts.isEmpty()) {
+//                List<HostAddress> responseHosts = encKdcRepPart.getCaddr().getElements();
+//                for (HostAddress h : requestHosts) {
+//                    if (!responseHosts.contains(h)) {
+//                        throw new KrbException("Unexpected client host");
+//                    }
+//                }
+//            }
+//        }
+
+        TgtTicket tgtTicket = getTicket(kdcRep);
+        LOG.info("Ticket expire time: " + tgtTicket.getEncKdcRepPart().getEndTime());
+        return tgtTicket;
+
+    }
+
+    protected byte[] decryptWithClientKey(EncryptedData data,
+                                          KeyUsage usage,
+                                          EncryptionKey clientKey) throws HasException {
+        if (clientKey == null) {
+            throw new HasException("Client key isn't available");
+        }
+        try {
+            return EncryptionHandler.decrypt(data, clientKey, usage);
+        } catch (KrbException e) {
+            throw new HasException("Errors occurred when decrypting the data." + e.getMessage());
+        }
+    }
+
+    /**
+     * Get the tgt ticket from the KdcRep.
+     *
+     * @param kdcRep the KDC reply
+     * @return the tgt ticket
+     */
+    public TgtTicket getTicket(KdcRep kdcRep) {
+        return new TgtTicket(kdcRep.getTicket(),
+            (EncAsRepPart) kdcRep.getEncPart(), kdcRep.getCname());
+    }
+
+    /**
+     * Get certificate from HAS server.
+     *
+     */
+    private X509Certificate getCertificate(String host, String port) throws HasException {
+        X509Certificate certificate;
+        Client client = Client.create();
+        WebResource webResource = client.resource("http://" + host + ":" + port + "/has/v1/getcert");
+        ClientResponse response = webResource.get(ClientResponse.class);
+        if (response.getStatus() != 200) {
+            throw new HasException(response.getEntity(String.class));
+        }
+        try {
+            CertificateFactory factory = CertificateFactory.getInstance("X.509");
+            InputStream in = response.getEntityInputStream();
+            certificate = (X509Certificate) factory.generateCertificate(in);
+        } catch (CertificateException e) {
+            throw new HasException("Failed to get certificate from HAS server", e);
+        }
+
+        return certificate;
+    }
+
+    /**
+     * Verify certificate.
+     */
+    private boolean verifyCertificate(X509Certificate certificate) throws HasException {
+        // Check if certificate is expired
+        try {
+            Date date = new Date();
+            certificate.checkValidity(date);
+        } catch (GeneralSecurityException e) {
+            return false;
+        }
+
+        // Get certificate from ca root
+        X509Certificate caRoot;
+        try {
+            // Get the ca root path from env; the client should export it.
+            String caRootPath = System.getenv("CA_ROOT");
+            if (caRootPath == null) {
+                caRootPath = CA_ROOT_DEFAULT;
+            }
+            File caRootFile = new File(caRootPath);
+            if (!caRootFile.exists()) {
+                throw new HasException("CA_ROOT: " + caRootPath + " does not exist.");
+            }
+
+            CertificateFactory factory = CertificateFactory.getInstance("X.509");
+            FileInputStream in = new FileInputStream(caRootFile);
+            caRoot = (X509Certificate) factory.generateCertificate(in);
+        } catch (CertificateException | FileNotFoundException e) {
+            throw new HasException("Failed to get certificate from ca root file", e);
+        }
+
+        // Verify certificate with root certificate
+        try {
+            PublicKey publicKey = caRoot.getPublicKey();
+            certificate.verify(publicKey);
+        } catch (GeneralSecurityException e) {
+            return false;
+        }
+
+        return true;
+    }
+
+    /**
+     * Create and save truststore file based on certificate.
+     *
+     */
+    private String createTrustStore(String host, X509Certificate certificate) throws HasException {
+        KeyStore trustStore;
+
+        // Create password
+        RandomStringGenerator generator = new RandomStringGenerator.Builder()
+            .withinRange('a', 'z')
+            .filteredBy(CharacterPredicates.LETTERS, CharacterPredicates.DIGITS)
+            .build();
+        String password = generator.generate(15);
+
+        File trustStoreFile = new File(clientConfigFolder + "/truststore.jks");
+        try {
+            trustStore = KeyStore.getInstance("jks");
+            trustStore.load(null, null);
+            trustStore.setCertificateEntry(host, certificate);
+            FileOutputStream out = new FileOutputStream(trustStoreFile);
+            trustStore.store(out, password.toCharArray());
+            out.close();
+        } catch (IOException | GeneralSecurityException e) {
+            throw new HasException("Failed to create and save truststore file", e);
+        }
+        return password;
+    }
+
+    /**
+     * Create ssl configuration file for client.
+     *
+     */
+    private void createClientSSLConfig(String password) throws HasException {
+        String resourcePath = "/ssl-client.conf.template";
+        InputStream templateResource = getClass().getResourceAsStream(resourcePath);
+        try {
+            String content = IOUtil.readInput(templateResource);
+            content = content.replaceAll("_location_", clientConfigFolder.getAbsolutePath()
+                + "/truststore.jks");
+            content = content.replaceAll("_password_", password);
+
+            IOUtil.writeFile(content, new File(clientConfigFolder + "/ssl-client.conf"));
+        } catch (IOException e) {
+            throw new HasException("Failed to create client ssl configuration file", e);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPlugin.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPlugin.java
new file mode 100644
index 0000000..4bd0749
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPlugin.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.has.client;
+
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+
+public interface HasClientPlugin {
+
+    /**
+     * Get the login module type ID, used to distinguish this module from others.
+     * Should correspond to the server side module.
+     *
+     * @return login type
+     */
+    String getLoginType();
+
+    /**
+     * Perform all the client side login logic; the result is wrapped in an AuthToken
+     * and will be validated by the HAS server.
+     *
+     * @param conf token plugin config
+     * @return user auth token
+     */
+    AuthToken login(HasConfig conf) throws HasLoginException;
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPluginRegistry.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPluginRegistry.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPluginRegistry.java
new file mode 100644
index 0000000..0254ed6
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasClientPluginRegistry.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.client;
+
+import org.apache.hadoop.has.common.HasException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.ServiceLoader;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class HasClientPluginRegistry {
+    static final Logger LOG = LoggerFactory.getLogger(HasClientPluginRegistry.class);
+
+    private static Map<String, Class> allPlugins = new ConcurrentHashMap<>();
+
+    static {
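+        // Discover all HasClientPlugin implementations on the classpath
+        // (declared via META-INF/services), keyed by their login type.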
+        ServiceLoader<HasClientPlugin> plugins = ServiceLoader.load(HasClientPlugin.class);
+
+        for (HasClientPlugin plugin : plugins) {
+            allPlugins.put(plugin.getLoginType(), plugin.getClass());
+        }
+    }
+
+    public static Set<String> registeredPlugins() {
+        return Collections.unmodifiableSet(allPlugins.keySet());
+    }
+
+    public static boolean registeredPlugin(String name) {
+        return allPlugins.containsKey(name);
+    }
+
+    public static HasClientPlugin createPlugin(String name) throws HasException {
+        if (!registeredPlugin(name)) {
+            throw new HasException("Unregistered plugin " + name);
+        }
+        try {
+            return allPlugins.get(name).newInstance();
+        } catch (Exception e) {
+            LOG.error("Create {} plugin failed", name, e);
+            throw new HasException(e.getMessage());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginException.java
----------------------------------------------------------------------
diff --git a/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginException.java b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginException.java
new file mode 100644
index 0000000..c07eb59
--- /dev/null
+++ b/has/has-client/src/main/java/org/apache/hadoop/has/client/HasLoginException.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.client;
+
+import org.apache.hadoop.has.common.HasException;
+
+public class HasLoginException extends HasException {
+    private static final long serialVersionUID = 4140429098192628252L;
+
+    public HasLoginException(Throwable cause) {
+        super(cause);
+    }
+
+    public HasLoginException(String message) {
+        super(message);
+    }
+
+    public HasLoginException(String message, Throwable cause) {
+        super(message, cause);
+    }
+
+}