Posted to dev@directory.apache.org by Colm O hEigeartaigh <co...@apache.org> on 2017/11/15 09:56:26 UTC

Re: [01/10] directory-kerby git commit: Add the HAS project to Kerby.

Hi Jiajia,

What is this new branch for?

Colm.

On Wed, Nov 15, 2017 at 5:12 AM, <pl...@apache.org> wrote:

> Repository: directory-kerby
> Updated Branches:
>   refs/heads/has 1e6d36497 -> be5805660
>
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
> new file mode 100644
> index 0000000..322eafd
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalCmd.java
> @@ -0,0 +1,61 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +public class AddPrincipalCmd extends HadminCmd {
> +
> +    public static final String USAGE = "Usage: add_principal [options] <principal-name>\n"
> +            + "\toptions are:\n"
> +            + "\t\t[-randkey]\n"
> +            + "\t\t[-pw password]\n"
> +            + "\tExample:\n"
> +            + "\t\tadd_principal -pw mypassword alice\n";
> +
> +    public AddPrincipalCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +
> +        if (items.length < 2) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        String clientPrincipal = items[items.length - 1];
> +        if (!items[1].startsWith("-")) {
> +            getHadmin().addPrincipal(clientPrincipal);
> +        } else if (items[1].startsWith("-randkey")) {
> +            getHadmin().addPrincipal(clientPrincipal);
> +        } else if (items[1].startsWith("-pw")) {
> +            String password = items[2];
> +            getHadmin().addPrincipal(clientPrincipal, password);
> +        } else {
> +            System.err.println("add_principal cmd format error.");
> +            System.err.println(USAGE);
> +            return;
> +        }
> +        System.out.println("Successfully added principal: " + clientPrincipal);
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
> new file mode 100644
> index 0000000..b38f2c7
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/AddPrincipalsCmd.java
> @@ -0,0 +1,78 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +import org.codehaus.jettison.json.JSONArray;
> +import org.codehaus.jettison.json.JSONObject;
> +import org.slf4j.Logger;
> +import org.slf4j.LoggerFactory;
> +
> +import java.io.BufferedReader;
> +import java.io.File;
> +import java.io.FileReader;
> +
> +public class AddPrincipalsCmd extends HadminCmd {
> +    private static final Logger LOG = LoggerFactory.getLogger(AddPrincipalsCmd.class);
> +
> +    private static final String USAGE = "\nUsage: create_principals [hostRoles-file]\n"
> +            + "\t'hostRoles-file' is a file with a hostRoles JSON string like:\n"
> +            + "\t\t{HOSTS: [ {\"name\":\"host1\",\"hostRoles\":\"HDFS\"}, "
> +            + "{\"name\":\"host2\",\"hostRoles\":\"HDFS,HBASE\"} ] }\n"
> +            + "\tExample:\n"
> +            + "\t\tcreate_principals hostroles.txt\n";
> +
> +    public AddPrincipalsCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        if (items.length != 2) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        File hostRoles = new File(items[1]);
> +        if (!hostRoles.exists()) {
> +            throw new HasException("The hostRoles file does not exist.");
> +        }
> +        // try-with-resources so the reader is always closed
> +        try (BufferedReader reader = new BufferedReader(new FileReader(hostRoles))) {
> +            StringBuilder sb = new StringBuilder();
> +            String tempString;
> +            while ((tempString = reader.readLine()) != null) {
> +                sb.append(tempString);
> +            }
> +            JSONArray hostArray = new JSONObject(sb.toString()).optJSONArray("HOSTS");
> +            for (int i = 0; i < hostArray.length(); i++) {
> +                JSONObject host = (JSONObject) hostArray.get(i);
> +                String[] roles = host.getString("hostRoles").split(",");
> +                for (String role : roles) {
> +                    System.out.println(getHadmin().addPrincByRole(host.getString("name"), role.toUpperCase()));
> +                }
> +            }
> +        } catch (Exception e) {
> +            throw new HasException("Failed to create principals: " + e.getMessage());
> +        }
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
> new file mode 100644
> index 0000000..98458ec
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DeletePrincipalCmd.java
> @@ -0,0 +1,80 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +import java.io.Console;
> +import java.util.Scanner;
> +
> +public class DeletePrincipalCmd extends HadminCmd {
> +
> +    public static final String USAGE = "Usage: delete_principal <principal-name>\n"
> +            + "\tExample:\n"
> +            + "\t\tdelete_principal alice\n";
> +
> +    private Boolean force = false;
> +
> +    public DeletePrincipalCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        if (items.length < 2) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +        String principal = items[items.length - 1];
> +        String reply;
> +        Console console = System.console();
> +        String prompt = "Are you sure you want to delete the principal? (yes/no, YES/NO, y/n, Y/N) ";
> +        if (console == null) {
> +            System.out.println("Couldn't get Console instance, "
> +                    + "maybe you're running this from within an IDE. "
> +                    + "Using a Scanner to read the reply.");
> +            Scanner scanner = new Scanner(System.in, "UTF-8");
> +            reply = getReply(scanner, prompt);
> +        } else {
> +            reply = getReply(console, prompt);
> +        }
> +        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
> +            getHadmin().deletePrincipal(principal);
> +            System.out.println("Successfully deleted " + principal);
> +        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
> +            System.out.println("Principal \"" + principal + "\" not deleted.");
> +        } else {
> +            System.err.println("Unknown response, the principal was not deleted.");
> +            System.err.println(USAGE);
> +        }
> +    }
> +
> +    private String getReply(Scanner scanner, String prompt) {
> +        System.out.println(prompt);
> +        return scanner.nextLine().trim();
> +    }
> +
> +    private String getReply(Console console, String prompt) {
> +        console.printf(prompt);
> +        String line = console.readLine();
> +        return line;
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
> new file mode 100644
> index 0000000..66eb5cb
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/DisableConfigureCmd.java
> @@ -0,0 +1,40 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +public class DisableConfigureCmd extends HadminCmd {
> +
> +    public static final String USAGE = "Usage: disable_configure\n"
> +            + "\tExample:\n"
> +            + "\t\tdisable_configure\n";
> +
> +    public DisableConfigureCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        getHadmin().setEnableOfConf("false");
> +        System.out.println("The configure function is now disabled.");
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
> new file mode 100644
> index 0000000..f40a6c6
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/EnableConfigureCmd.java
> @@ -0,0 +1,40 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +public class EnableConfigureCmd extends HadminCmd {
> +
> +    public static final String USAGE = "Usage: enable_configure\n"
> +            + "\tExample:\n"
> +            + "\t\tenable_configure\n";
> +
> +    public EnableConfigureCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        getHadmin().setEnableOfConf("true");
> +        System.out.println("The configure function is now enabled.");
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
> new file mode 100644
> index 0000000..c5b130c
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ExportKeytabsCmd.java
> @@ -0,0 +1,57 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +import org.apache.hadoop.has.server.web.HostRoleType;
> +
> +import java.io.File;
> +
> +public class ExportKeytabsCmd extends HadminCmd {
> +    private static final String USAGE = "\nUsage: export_keytabs <host> [role]\n"
> +            + "\tExample:\n"
> +            + "\t\texport_keytabs host1 HDFS\n";
> +
> +    public ExportKeytabsCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        if (items.length < 2) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +        String host = items[1];
> +        if (items.length >= 3) {
> +            exportKeytab(host, items[2]);
> +            return;
> +        }
> +        for (HostRoleType r : HostRoleType.values()) {
> +            exportKeytab(host, r.getName());
> +        }
> +    }
> +
> +    public void exportKeytab(String host, String role) throws HasException {
> +        File keytab = new File(role + "-" + host + ".keytab");
> +        getHadmin().getKeytabByHostAndRole(host, role, keytab);
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
> new file mode 100644
> index 0000000..ebaf07f
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetHostRolesCmd.java
> @@ -0,0 +1,36 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +public class GetHostRolesCmd extends HadminCmd {
> +    private static final String USAGE = "Usage: get_hostroles\n"
> +            + "\tExample:\n"
> +            + "\t\tget_hostroles\n";
> +
> +    public GetHostRolesCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) {
> +        getHadmin().getHostRoles();
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
> new file mode 100644
> index 0000000..88612a8
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/GetPrincipalCmd.java
> @@ -0,0 +1,76 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +import org.apache.kerby.kerberos.kerb.identity.KrbIdentity;
> +import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
> +import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
> +
> +import java.util.Map;
> +
> +public class GetPrincipalCmd extends HadminCmd {
> +    private static final String USAGE = "Usage: getprinc principalName\n"
> +        + "\tExample:\n"
> +        + "\t\tgetprinc hello@TEST.COM\n";
> +
> +    public GetPrincipalCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) {
> +        if (items.length != 2) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        String princName = items[items.length - 1];
> +        KrbIdentity identity = null;
> +        try {
> +            identity = getHadmin().getPrincipal(princName);
> +        } catch (HasException e) {
> +            System.err.println("Failed to get principal: " + princName + ". " + e.getMessage());
> +        }
> +
> +        if (identity == null) {
> +            System.err.println(princName + " doesn't exist\n");
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        Map<EncryptionType, EncryptionKey> keys = identity.getKeys();
> +
> +        System.out.println(
> +            "Principal: " + identity.getPrincipalName() + "\n"
> +                + "Expiration date: " + identity.getExpireTime() + "\n"
> +                + "Created time: " + identity.getCreatedTime() + "\n"
> +                + "KDC flags: " + identity.getKdcFlags() + "\n"
> +                + "Key version: " + identity.getKeyVersion() + "\n"
> +                + "Number of keys: " + keys.size()
> +        );
> +
> +        for (EncryptionType keyType : keys.keySet()) {
> +            System.out.println("key: " + keyType);
> +        }
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java
> new file mode 100644
> index 0000000..95ce59f
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/HadminCmd.java
> @@ -0,0 +1,42 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +public abstract class HadminCmd {
> +
> +    private LocalHasAdmin hadmin;
> +
> +    public HadminCmd(LocalHasAdmin hadmin) {
> +        this.hadmin = hadmin;
> +    }
> +
> +    protected LocalHasAdmin getHadmin() {
> +        return hadmin;
> +    }
> +
> +    /**
> +     * Execute the hadmin command.
> +     *
> +     * @param input the input command tokens to execute
> +     * @throws HasException if the command fails
> +     */
> +    public abstract void execute(String[] input) throws HasException;
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
> new file mode 100644
> index 0000000..99e05e2
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/KeytabAddCmd.java
> @@ -0,0 +1,91 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +import java.io.File;
> +import java.util.List;
> +
> +public class KeytabAddCmd extends HadminCmd {
> +    private static final String USAGE =
> +        "Usage: ktadd [-k[eytab] keytab] [-q] [-e keysaltlist] [-norandkey] [principal | -glob princ-exp] [...]";
> +
> +    private static final String DEFAULT_KEYTAB_FILE_LOCATION = "/etc/krb5.keytab";
> +
> +    public KeytabAddCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) {
> +
> +        String principal = null;
> +        String keytabFileLocation = null;
> +        Boolean glob = false;
> +
> +        // Since items[0] is "ktadd", parsing starts at index 1.
> +        int index = 1;
> +        while (index < items.length) {
> +            String command = items[index];
> +            if (command.equals("-k")) {
> +                index++;
> +                if (index >= items.length) {
> +                    System.err.println(USAGE);
> +                    return;
> +                }
> +                keytabFileLocation = items[index].trim();
> +
> +            } else if (command.equals("-glob")) {
> +                glob = true;
> +            } else if (!command.startsWith("-")) {
> +                principal = command;
> +            }
> +            index++;
> +        }
> +
> +        if (keytabFileLocation == null) {
> +            keytabFileLocation = DEFAULT_KEYTAB_FILE_LOCATION;
> +        }
> +        File keytabFile = new File(keytabFileLocation);
> +
> +        if (principal == null) {
> +            System.out.println((glob ? "princ-exp" : "principal") + " not specified!");
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        try {
> +            if (glob) {
> +                List<String> principals = getHadmin().getPrincipals(principal);
> +                if (principals.size() != 0) {
> +                    getHadmin().exportKeytab(keytabFile, principals);
> +                }
> +            } else {
> +                getHadmin().exportKeytab(keytabFile, principal);
> +            }
> +            System.out.println("Exported principal(s) to keytab file " + keytabFile + " successfully.");
> +        } catch (HasException e) {
> +            System.err.println("Failed to add entries for principal \"" + principal + "\" to the keytab: "
> +                    + e.getMessage());
> +        }
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
> new file mode 100644
> index 0000000..ef9e7f7
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/ListPrincipalsCmd.java
> @@ -0,0 +1,63 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +import java.util.List;
> +
> +public class ListPrincipalsCmd extends HadminCmd {
> +    private static final String USAGE = "Usage: list_principals [expression]\n"
> +            + "\t'expression' is a shell-style glob expression that can contain the wild-card characters ?, *, and [].\n"
> +            + "\tExample:\n"
> +            + "\t\tlist_principals [expression]\n";
> +
> +    public ListPrincipalsCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        if (items.length > 2) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        List<String> principalLists = null;
> +
> +        if (items.length == 1) {
> +            principalLists = getHadmin().getPrincipals();
> +        } else {
> +            //have expression
> +            String exp = items[1];
> +            principalLists = getHadmin().getPrincipals(exp);
> +        }
> +
> +        if (principalLists.size() == 0 || (principalLists.size() == 1 && principalLists.get(0).isEmpty())) {
> +            return;
> +        } else {
> +            System.out.println("Principals are listed:");
> +            for (int i = 0; i < principalLists.size(); i++) {
> +                System.out.println(principalLists.get(i));
> +            }
> +        }
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
> new file mode 100644
> index 0000000..2c0ba20
> --- /dev/null
> +++ b/has/has-tool/has-server-tool/src/main/java/org/apache/hadoop/has/tool/server/hadmin/local/cmd/RenamePrincipalCmd.java
> @@ -0,0 +1,82 @@
> +/**
> + *  Licensed to the Apache Software Foundation (ASF) under one
> + *  or more contributor license agreements.  See the NOTICE file
> + *  distributed with this work for additional information
> + *  regarding copyright ownership.  The ASF licenses this file
> + *  to you under the Apache License, Version 2.0 (the
> + *  "License"); you may not use this file except in compliance
> + *  with the License.  You may obtain a copy of the License at
> + *
> + *    http://www.apache.org/licenses/LICENSE-2.0
> + *
> + *  Unless required by applicable law or agreed to in writing,
> + *  software distributed under the License is distributed on an
> + *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
> + *  KIND, either express or implied.  See the License for the
> + *  specific language governing permissions and limitations
> + *  under the License.
> + *
> + */
> +package org.apache.hadoop.has.tool.server.hadmin.local.cmd;
> +
> +import org.apache.hadoop.has.common.HasException;
> +import org.apache.hadoop.has.server.admin.LocalHasAdmin;
> +
> +import java.io.Console;
> +import java.util.Scanner;
> +
> +public class RenamePrincipalCmd extends HadminCmd {
> +    public static final String USAGE = "Usage: rename_principal <old_principal_name> <new_principal_name>\n"
> +            + "\tExample:\n"
> +            + "\t\trename_principal alice bob\n";
> +
> +    public RenamePrincipalCmd(LocalHasAdmin hadmin) {
> +        super(hadmin);
> +    }
> +
> +    @Override
> +    public void execute(String[] items) throws HasException {
> +        if (items.length < 3) {
> +            System.err.println(USAGE);
> +            return;
> +        }
> +
> +        String oldPrincipalName = items[items.length - 2];
> +        String newPrincipalName = items[items.length - 1];
> +
> +        String reply;
> +        Console console = System.console();
> +        String prompt = "Are you sure you want to rename the principal? (yes/no, YES/NO, y/n, Y/N) ";
> +        if (console == null) {
> +            System.out.println("Couldn't get Console instance, "
> +                    + "maybe you're running this from within an IDE. "
> +                    + "Using a Scanner to read the reply.");
> +            Scanner scanner = new Scanner(System.in, "UTF-8");
> +            reply = getReply(scanner, prompt);
> +        } else {
> +            reply = getReply(console, prompt);
> +        }
> +        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
> +            getHadmin().renamePrincipal(oldPrincipalName, newPrincipalName);
> +            System.out.println("Successfully renamed principal \"" + oldPrincipalName
> +                + "\" to \"" + newPrincipalName + "\".");
> +        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
> +            System.out.println("Principal \"" + oldPrincipalName + "\" not renamed.");
> +        } else {
> +            System.err.println("Unknown response, the principal was not renamed.");
> +            System.err.println(USAGE);
> +        }
> +    }
> +
> +    private String getReply(Scanner scanner, String prompt) {
> +        System.out.println(prompt);
> +        return scanner.nextLine().trim();
> +    }
> +
> +    private String getReply(Console console, String prompt) {
> +        console.printf(prompt);
> +        String line = console.readLine();
> +        return line;
> +    }
> +}
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/pom.xml
> ----------------------------------------------------------------------
> diff --git a/has/has-tool/pom.xml b/has/has-tool/pom.xml
> new file mode 100644
> index 0000000..a43041a
> --- /dev/null
> +++ b/has/has-tool/pom.xml
> @@ -0,0 +1,23 @@
> +<?xml version="1.0" encoding="UTF-8"?>
> +<project xmlns="http://maven.apache.org/POM/4.0.0"
> +         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
> +         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
> +
> +  <parent>
> +    <groupId>org.apache.hadoop</groupId>
> +    <artifactId>has-project</artifactId>
> +    <version>1.0.0-SNAPSHOT</version>
> +  </parent>
> +
> +  <modelVersion>4.0.0</modelVersion>
> +  <artifactId>has-tool</artifactId>
> +  <packaging>pom</packaging>
> +  <description>HAS tool</description>
> +  <name>HAS tool</name>
> +
> +  <modules>
> +    <module>has-client-tool</module>
> +    <module>has-server-tool</module>
> +  </modules>
> +
> +</project>
> \ No newline at end of file
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/pom.xml
> ----------------------------------------------------------------------
> diff --git a/has/pom.xml b/has/pom.xml
> new file mode 100644
> index 0000000..ad80711
> --- /dev/null
> +++ b/has/pom.xml
> @@ -0,0 +1,128 @@
> +<?xml version="1.0" encoding="UTF-8"?>
> +<!--
> +  Licensed under the Apache License, Version 2.0 (the "License");
> +  you may not use this file except in compliance with the License.
> +  You may obtain a copy of the License at
> +
> +    http://www.apache.org/licenses/LICENSE-2.0
> +
> +  Unless required by applicable law or agreed to in writing, software
> +  distributed under the License is distributed on an "AS IS" BASIS,
> +  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
> +  See the License for the specific language governing permissions and
> +  limitations under the License. See accompanying LICENSE file.
> +-->
> +
> +<project xmlns="http://maven.apache.org/POM/4.0.0"
> +         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
> +         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
> +
> +  <parent>
> +    <groupId>org.apache</groupId>
> +    <artifactId>apache</artifactId>
> +    <version>18</version>
> +    <relativePath/>
> +  </parent>
> +
> +  <modelVersion>4.0.0</modelVersion>
> +  <groupId>org.apache.hadoop</groupId>
> +  <artifactId>has-project</artifactId>
> +  <version>1.0.0-SNAPSHOT</version>
> +  <description>Hadoop Authentication Server</description>
> +  <name>Hadoop Authentication Server</name>
> +  <packaging>pom</packaging>
> +
> +  <modules>
> +    <module>has-common</module>
> +    <module>has-plugins</module>
> +    <module>has-server</module>
> +    <module>has-client</module>
> +    <module>has-dist</module>
> +    <module>has-tool</module>
> +  </modules>
> +
> +  <properties>
> +    <commons-codec.version>1.4</commons-codec.version>
> +    <kerby.version>1.1.0-SNAPSHOT</kerby.version>
> +    <slf4j.version>1.7.25</slf4j.version>
> +    <buildtools.dir>${basedir}/build-tools</buildtools.dir>
> +  </properties>
> +
> +  <build>
> +    <plugins>
> +      <plugin>
> +        <groupId>org.apache.maven.plugins</groupId>
> +        <artifactId>maven-compiler-plugin</artifactId>
> +        <configuration>
> +          <source>1.8</source>
> +          <target>1.8</target>
> +        </configuration>
> +      </plugin>
> +
> +      <plugin>
> +        <groupId>org.apache.maven.plugins</groupId>
> +        <artifactId>maven-checkstyle-plugin</artifactId>
> +        <version>2.17</version>
> +        <configuration>
> +          <configLocation>${buildtools.dir}/has-checkstyle.xml</configLocation>
> +          <includeTestSourceDirectory>true</includeTestSourceDirectory>
> +          <encoding>UTF-8</encoding>
> +          <failOnViolation>true</failOnViolation>
> +        </configuration>
> +        <executions>
> +          <execution>
> +            <id>validate</id>
> +            <phase>validate</phase>
> +            <goals>
> +              <goal>check</goal>
> +            </goals>
> +          </execution>
> +        </executions>
> +      </plugin>
> +
> +      <plugin>
> +        <groupId>org.apache.maven.plugins</groupId>
> +        <artifactId>maven-surefire-plugin</artifactId>
> +        <version>2.17</version>
> +        <configuration>
> +          <runOrder>alphabetical</runOrder>
> +        </configuration>
> +      </plugin>
> +    </plugins>
> +  </build>
> +
> +  <profiles>
> +    <profile>
> +      <id>nochecks</id>
> +      <properties>
> +        <pmd.skip>true</pmd.skip>
> +        <checkstyle.skip>true</checkstyle.skip>
> +      </properties>
> +    </profile>
> +    <profile>
> +      <id>activate-buildtools-in-module</id>
> +      <activation>
> +        <file>
> +          <exists>${basedir}/../build-tools/has-checkstyle.xml</exists>
> +        </file>
> +      </activation>
> +      <properties>
> +        <buildtools.dir>${basedir}/../build-tools</buildtools.dir>
> +      </properties>
> +    </profile>
> +    <profile>
> +      <id>activate-buildtools-in-submodule</id>
> +      <activation>
> +        <file>
> +          <exists>${basedir}/../../build-tools/has-checkstyle.xml</exists>
> +        </file>
> +      </activation>
> +      <properties>
> +        <buildtools.dir>${basedir}/../../build-tools</buildtools.dir>
> +      </properties>
> +    </profile>
> +  </profiles>
> +
> +</project>
> +
> +
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/hadoop/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/hadoop/README.md b/has/supports/hadoop/README.md
> new file mode 100644
> index 0000000..15f177c
> --- /dev/null
> +++ b/has/supports/hadoop/README.md
> @@ -0,0 +1,339 @@
> +Enable Hadoop
> +================
> +
> +## 1. Build Hadoop
> +
> +### Apply the patch to hadoop-2.7.2 source code
> +```
> +git apply hadoop-2.7.2.patch
> +```
> +
> +### Build
> +```
> +mvn package -Pdist,native -Dtar -DskipTests -Dmaven.javadoc.skip=true
> -Dcontainer-executor.conf.dir=/etc/hadoop/conf
> +```
> +
> +### Redeploy Hadoop
> +
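> +A minimal redeploy sketch, assuming a tarball-based deployment; the paths and daemon scripts below are illustrative, not part of the patch:
> +```
> +# Illustrative only: stop the daemons, unpack the freshly built
> +# distribution, then restart (adjust paths to your environment)
> +sbin/stop-yarn.sh && sbin/stop-dfs.sh
> +tar -xzf hadoop-dist/target/hadoop-2.7.2.tar.gz -C /opt
> +sbin/start-dfs.sh && sbin/start-yarn.sh
> +```
> +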
> +## 2. Distribute and configure Keytab files
> +
> +### Create keytab and deploy krb5.conf and has-client.conf
> +Please look at [How to start HAS](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/has-start.md) for details.
> +
> +### Distribute keytab files to the corresponding nodes
> +
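> +For example, assuming the keytabs belong in /etc/hadoop/conf on each node (the host name and paths are illustrative):
> +```
> +# Illustrative only: copy the keytabs to each node with whatever
> +# mechanism your cluster uses
> +scp hdfs.keytab yarn.keytab mapred.keytab root@node1:/etc/hadoop/conf/
> +```
> +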
> +### Set permission of keytab files
> +```
> +# Keytab files should be read-only
> +chmod 400 *.keytab
> +```
> +
> +## 3. Update Hadoop configuration files
> +
> +### Update core-site.xml
> +Add the following properties:
> +```
> +<property>
> +  <name>hadoop.security.authorization</name>
> +  <value>true</value>
> +</property>
> +<property>
> +  <name>hadoop.security.authentication</name>
> +  <value>kerberos</value>
> +</property>
> +<property>
> +   <name>hadoop.security.authentication.use.has</name>
> +   <value>true</value>
> +</property>
> +```
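> +
> +The patched UserGroupInformation also reads a HAS server address from the hadoop.security.has property (see conf.get("hadoop.security.has") in the patch below); if your deployment relies on it, set it here as well. The value below is a placeholder, not a confirmed format:
> +```
> +<property>
> +  <name>hadoop.security.has</name>
> +  <!-- placeholder value; check the HAS documentation for the exact format -->
> +  <value>https://<has_server_host>:<has_server_port>/has/v1</value>
> +</property>
> +```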
> +
> +### Update hdfs-site.xml
> +Add the following properties:
> +```
> +<!-- General HDFS security config -->
> +<property>
> +  <name>dfs.block.access.token.enable</name>
> +  <value>true</value>
> +</property>
> +
> +<!-- NameNode security config -->
> +<property>
> +  <name>dfs.namenode.keytab.file</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +</property>
> +<property>
> +  <name>dfs.namenode.kerberos.principal</name>
> +  <value>hdfs/_HOST@HADOOP.COM</value>
> +</property>
> +<property>
> +  <name>dfs.namenode.kerberos.internal.spnego.principal</name>
> +  <value>HTTP/_HOST@HADOOP.COM</value>
> +</property>
> +<property>
> +  <name>dfs.namenode.delegation.token.max-lifetime</name>
> +  <value>604800000</value>
> +  <description>The maximum lifetime in milliseconds for which a delegation token is valid.</description>
> +</property>
> +
> +<!-- Secondary NameNode security config -->
> +<property>
> +  <name>dfs.secondary.namenode.keytab.file</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +</property>
> +<property>
> +  <name>dfs.secondary.namenode.kerberos.principal</name>
> +  <value>hdfs/_HOST@HADOOP.COM</value>
> +</property>
> +<property>
> +  <name>dfs.secondary.namenode.kerberos.internal.spnego.principal</name>
> +  <value>HTTP/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<!-- DataNode security config -->
> +<property>
> +  <name>dfs.datanode.data.dir.perm</name>
> +  <value>700</value>
> +</property>
> +<property>
> +  <name>dfs.datanode.keytab.file</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +</property>
> +<property>
> +  <name>dfs.datanode.kerberos.principal</name>
> +  <value>hdfs/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<!-- HTTPS config -->
> +<property>
> +  <name>dfs.http.policy</name>
> +  <value>HTTPS_ONLY</value>
> +</property>
> +<property>
> +  <name>dfs.data.transfer.protection</name>
> +  <value>integrity</value>
> +</property>
> +<property>
> +  <name>dfs.web.authentication.kerberos.keytab</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +</property>
> +<property>
> +  <name>dfs.web.authentication.kerberos.principal</name>
> +  <value>HTTP/_HOST@HADOOP.COM</value>
> +</property>
> +```
> +
> +### Configuration for HDFS HA
> +
> +> For normal configuration, please look at [HDFS High Availability](https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-hdfs/HDFSHighAvailabilityWithNFS.html)
> +
> +Add the following properties in hdfs-site.xml:
> +```
> +<property>
> +  <name>dfs.journalnode.keytab.file</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +</property>
> +<property>
> +  <name>dfs.journalnode.kerberos.principal</name>
> +  <value>hdfs/_HOST@HADOOP.COM</value>
> +</property>
> +<property>
> +  <name>dfs.journalnode.kerberos.internal.spnego.principal</name>
> +  <value>HTTP/_HOST@HADOOP.COM</value>
> +</property>
> +```
> +
> +### Update yarn-site.xml
> +Add the following properties:
> +```
> +<!-- ResourceManager security config -->
> +<property>
> +  <name>yarn.resourcemanager.keytab</name>
> +  <value>/etc/hadoop/conf/yarn.keytab</value>
> +</property>
> +<property>
> +  <name>yarn.resourcemanager.principal</name>
> +  <value>yarn/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<!-- NodeManager security config -->
> +<property>
> +  <name>yarn.nodemanager.keytab</name>
> +  <value>/etc/hadoop/conf/yarn.keytab</value>
> +</property>
> +<property>
> +  <name>yarn.nodemanager.principal</name>
> +  <value>yarn/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<!-- HTTPS config -->
> +<property>
> +  <name>mapreduce.jobhistory.http.policy</name>
> +  <value>HTTPS_ONLY</value>
> +</property>
> +
> +<!-- Container executor config -->
> +<property>
> +  <name>yarn.nodemanager.container-executor.class</name>
> +  <value>org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor</value>
> +</property>
> +<property>
> +  <name>yarn.nodemanager.linux-container-executor.group</name>
> +  <value>root</value>
> +</property>
> +
> +<!-- Timeline service config, if timeline service enabled -->
> +<property>
> +  <name>yarn.timeline-service.principal</name>
> +  <value>yarn/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<property>
> +  <name>yarn.timeline-service.keytab</name>
> +  <value>/etc/hadoop/conf/yarn.keytab</value>
> +</property>
> +
> +<property>
> +  <name>yarn.timeline-service.http-authentication.type</name>
> +  <value>kerberos</value>
> +</property>
> +
> +<property>
> +  <name>yarn.timeline-service.http-authentication.kerberos.principal</name>
> +  <value>HTTP/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<property>
> +  <name>yarn.timeline-service.http-authentication.kerberos.keytab</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +</property>
> +
> +<!-- Proxy server config, if web proxy server enabled -->
> +<property>
> +  <name>yarn.web-proxy.keytab</name>
> +  <value>/etc/hadoop/conf/yarn.keytab</value>
> +</property>
> +
> +<property>
> +  <name>yarn.web-proxy.principal</name>
> +  <value>yarn/_HOST@HADOOP.COM</value>
> +</property>
> +```
> +
> +### Update mapred-site.xml
> +Add the following properties:
> +```
> +<!-- MapReduce security config -->
> +<property>
> +  <name>mapreduce.jobhistory.keytab</name>
> +  <value>/etc/hadoop/conf/mapred.keytab</value>
> +</property>
> +<property>
> +  <name>mapreduce.jobhistory.principal</name>
> +  <value>mapred/_HOST@HADOOP.COM</value>
> +</property>
> +```
> +
> +### Create and configure ssl-server.xml
> +```
> +cd $HADOOP_HOME
> +cp etc/hadoop/ssl-server.xml.example etc/hadoop/ssl-server.xml
> +```
> +
> +Configure ssl-server.xml:
> +Please look at [How to deploy https](https://github.com/intel-bigdata/has/blob/release-1.0.0/doc/deploy-https.md).
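> +
> +As a rough sketch, the copied file needs keystore and truststore entries along these lines (paths and passwords are illustrative; the linked guide is authoritative):
> +```
> +<!-- Illustrative values only; generate real keystores per the HTTPS guide -->
> +<property>
> +  <name>ssl.server.keystore.location</name>
> +  <value>/etc/hadoop/conf/keystore.jks</value>
> +</property>
> +<property>
> +  <name>ssl.server.keystore.password</name>
> +  <value>changeit</value>
> +</property>
> +<property>
> +  <name>ssl.server.truststore.location</name>
> +  <value>/etc/hadoop/conf/truststore.jks</value>
> +</property>
> +```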
> +
> +## 4. Configure container-executor
> +
> +### Create and configure container-executor.cfg
> +
> +Example of container-executor.cfg:
> +```
> +#configured value of yarn.nodemanager.linux-container-executor.group
> +yarn.nodemanager.linux-container-executor.group=root
> +#comma separated list of users who cannot run applications
> +banned.users=bin
> +#Prevent other super-users
> +min.user.id=0
> +#comma separated list of system users who CAN run applications
> +allowed.system.users=root,nobody,impala,hive,hdfs,yarn
> +```
> +
> +Set permission:
> +```
> +mv container-executor.cfg /etc/hadoop/conf
> +# container-executor.cfg should be read-only
> +chmod 400 container-executor.cfg
> +```
> +
> +### Set permission of container-executor
> +```
> +chmod 6050 container-executor
> +# Test whether the configuration is correct
> +container-executor --checksetup
> +```
> +
> +## 5. Setting up cross-realm for distcp
> +
> +### Setup cross realm trust between realms
> +Please look at [How to setup cross-realm](https://github.com/intel-bigdata/has/blob/cross-realm/doc/cross-realm.md).
> +
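> +As a rough sketch, cross-realm trust boils down to creating matching krbtgt principals in both KDCs, for example with the add_principal command from this commit (the shared password is illustrative; see the linked guide for the full procedure):
> +```
> +# Illustrative only: run against both KDCs so the keys match
> +add_principal -pw <shared-password> krbtgt/B.HADOOP.COM@A.HADOOP.COM
> +add_principal -pw <shared-password> krbtgt/A.HADOOP.COM@B.HADOOP.COM
> +```
> +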
> +### Update core-site.xml
> +
> +Set the hadoop.security.auth_to_local parameter in both clusters by adding the following properties:
> +```
> +<!-- Set up cross realm between A.HADOOP.COM and B.HADOOP.COM -->
> +<property>
> +    <name>hadoop.security.auth_to_local</name>
> +    <value>
> +        RULE:[1:$1@$0](.*@A.HADOOP.COM)s/@A.HADOOP.COM///L
> +        RULE:[2:$1@$0](.*@A.HADOOP.COM)s/@A.HADOOP.COM///L
> +        RULE:[1:$1@$0](.*@B.HADOOP.COM)s/@B.HADOOP.COM///L
> +        RULE:[2:$1@$0](.*@B.HADOOP.COM)s/@B.HADOOP.COM///L
> +    </value>
> +</property>
> +```
> +
> +For detailed mapping rules, please look at [Mapping Rule](https://www.cloudera.com/documentation/enterprise/5-9-x/topics/cdh_sg_kerbprin_to_sn.html).
> +
> +Test the mapping:
> +```
> +hadoop org.apache.hadoop.security.HadoopKerberosName hdfs/localhost@A.HADOOP.COM
> +```
> +
> +### Update hdfs-site.xml
> +Add the following properties on the client side:
> +```
> +<!-- Control allowed realms to authenticate with -->
> +<property>
> +    <name>dfs.namenode.kerberos.principal.pattern</name>
> +    <value>*</value>
> +</property>
> +```
> +
> +### Validate
> +Test that the trust is set up by running HDFS commands from the A.HADOOP.COM cluster against B.HADOOP.COM. Run the following command on a node of the A.HADOOP.COM cluster:
> +```
> +hdfs dfs -ls hdfs://<NameNode_FQDN_for_B.HADOOP.COM_Cluster>:8020/
> +```
> +
> +### Distcp between secure clusters
> +
> +Run the distcp command:
> +```
> +hadoop distcp hdfs://<Cluster_A_URI> hdfs://<Cluster_B_URI>
> +```
> +
> +### Distcp between secure and insecure clusters
> +
> +Add the following properties in core-site.xml:
> +```
> +<property>
> +  <name>ipc.client.fallback-to-simple-auth-allowed</name>
> +  <value>true</value>
> +</property>
> +```
> +
> +Or run the distcp command with security setting:
> +```
> +hadoop distcp -D ipc.client.fallback-to-simple-auth-allowed=true hdfs://<Cluster_A_URI> hdfs://<Cluster_B_URI>
> +```
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/supports/hadoop/hadoop-2.7.2.patch
> ----------------------------------------------------------------------
> diff --git a/has/supports/hadoop/hadoop-2.7.2.patch
> b/has/supports/hadoop/hadoop-2.7.2.patch
> new file mode 100644
> index 0000000..336a83d
> --- /dev/null
> +++ b/has/supports/hadoop/hadoop-2.7.2.patch
> @@ -0,0 +1,152 @@
> +diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/hadoop-auth/pom.xml
> +index aa3c2c7..e4f1fd2 100644
> +--- a/hadoop-common-project/hadoop-auth/pom.xml
> ++++ b/hadoop-common-project/hadoop-auth/pom.xml
> +@@ -143,6 +143,11 @@
> +       <artifactId>curator-test</artifactId>
> +       <scope>test</scope>
> +     </dependency>
> ++    <dependency>
> ++      <groupId>org.apache.hadoop</groupId>
> ++      <artifactId>has-client</artifactId>
> ++      <version>1.0.0-SNAPSHOT</version>
> ++    </dependency>
> +   </dependencies>
> +
> +   <build>
> +diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
> +index f7f5f63..80b7aca 100644
> +--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
> ++++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
> +@@ -44,7 +44,8 @@
> +   public static String getKrb5LoginModuleName() {
> +     return System.getProperty("java.vendor").contains("IBM")
> +       ? "com.ibm.security.auth.module.Krb5LoginModule"
> +-      : "com.sun.security.auth.module.Krb5LoginModule";
> ++//      : "com.sun.security.auth.module.Krb5LoginModule";
> ++      : "org.apache.hadoop.has.client.HasLoginModule";
> +   }
> +
> +   public static Oid getOidInstance(String oidName)
> +diff --git a/hadoop-common-project/hadoop-common/src/main/java/
> org/apache/hadoop/security/UserGroupInformation.java
> b/hadoop-common-project/hadoop-common/src/main/java/
> org/apache/hadoop/security/UserGroupInformation.java
> +index 65e4166..f5224bb 100644
> +--- a/hadoop-common-project/hadoop-common/src/main/java/
> org/apache/hadoop/security/UserGroupInformation.java
> ++++ b/hadoop-common-project/hadoop-common/src/main/java/
> org/apache/hadoop/security/UserGroupInformation.java
> +@@ -89,6 +89,8 @@
> +   private static boolean shouldRenewImmediatelyForTests = false;
> +   static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
> +   static final String HADOOP_PROXY_USER = "HADOOP_PROXY_USER";
> ++  public static final String HADOOP_SECURITY_AUTHENTICATION_USE_HAS
> ++    = "hadoop.security.authentication.use.has";
> +
> +   /**
> +    * For the purposes of unit tests, we want to test login
> +@@ -460,6 +462,9 @@ public String toString() {
> +       "hadoop-user-kerberos";
> +     private static final String KEYTAB_KERBEROS_CONFIG_NAME =
> +       "hadoop-keytab-kerberos";
> ++    private static final String HAS_KERBEROS_CONFIG_NAME =
> ++      "hadoop-has-kerberos";
> ++
> +
> +     private static final Map<String, String> BASIC_JAAS_OPTIONS =
> +       new HashMap<String,String>();
> +@@ -516,6 +521,29 @@ public String toString() {
> +       KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
> +       KEYTAB_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
> +     }
> ++
> ++    private static final Map<String, String> HAS_KERBEROS_OPTIONS =
> ++        new HashMap<String, String>();
> ++
> ++    static {
> ++      if (IBM_JAVA) {
> ++        HAS_KERBEROS_OPTIONS.put("useDefaultCcache", "true");
> ++      } else {
> ++        HAS_KERBEROS_OPTIONS.put("doNotPrompt", "true");
> ++        HAS_KERBEROS_OPTIONS.put("useTgtTicket", "true");
> ++        HAS_KERBEROS_OPTIONS.put("hadoopSecurityHas",
> conf.get("hadoop.security.has"));
> ++      }
> ++      HAS_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
> ++    }
> ++
> ++    private static final AppConfigurationEntry HAS_KERBEROS_LOGIN =
> ++      new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
> ++                                LoginModuleControlFlag.OPTIONAL,
> ++                                HAS_KERBEROS_OPTIONS);
> ++    private static final AppConfigurationEntry[] HAS_KERBEROS_CONF =
> ++      new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, HAS_KERBEROS_LOGIN,
> ++                                  HADOOP_LOGIN};
> ++
> +     private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN =
> +       new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
> +                                 LoginModuleControlFlag.REQUIRED,
> +@@ -546,6 +574,8 @@ public String toString() {
> +         }
> +         KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
> +         return KEYTAB_KERBEROS_CONF;
> ++      } else if(HAS_KERBEROS_CONFIG_NAME.equals(appName)) {
> ++        return HAS_KERBEROS_CONF;
> +       }
> +       return null;
> +     }
> +@@ -792,9 +822,16 @@ static void loginUserFromSubject(Subject subject)
> throws IOException {
> +       if (subject == null) {
> +         subject = new Subject();
> +       }
> +-      LoginContext login =
> +-          newLoginContext(authenticationMethod.getLoginAppName(),
> +-                          subject, new HadoopConfiguration());
> ++      LoginContext login = null;
> ++      if (authenticationMethod.equals(AuthenticationMethod.KERBEROS)
> ++        && conf.getBoolean(HADOOP_SECURITY_AUTHENTICATION_USE_HAS,
> false)) {
> ++        login = newLoginContext(HadoopConfiguration.HAS_
> KERBEROS_CONFIG_NAME,
> ++          subject, new HadoopConfiguration());
> ++      } else {
> ++        login = newLoginContext(authenticationMethod.getLoginAppName(),
> ++          subject, new HadoopConfiguration());
> ++      }
> ++
> +       login.login();
> +       UserGroupInformation realUser = new UserGroupInformation(subject);
> +       realUser.setLogin(login);
> +@@ -925,6 +962,39 @@ public void run() {
> +       }
> +     }
> +   }
> ++
> ++  /**
> ++   * Log a user in from a tgt ticket.
> ++   * @throws IOException
> ++   */
> ++  @InterfaceAudience.Public
> ++  @InterfaceStability.Evolving
> ++  public synchronized
> ++  static void loginUserFromHas() throws IOException {
> ++    if (!isSecurityEnabled())
> ++      return;
> ++
> ++    Subject subject = new Subject();
> ++    LoginContext login;
> ++    long start = 0;
> ++    try {
> ++      login = newLoginContext(HadoopConfiguration.HAS_
> KERBEROS_CONFIG_NAME,
> ++            subject, new HadoopConfiguration());
> ++      start = Time.now();
> ++      login.login();
> ++      metrics.loginSuccess.add(Time.now() - start);
> ++      loginUser = new UserGroupInformation(subject);
> ++      loginUser.setLogin(login);
> ++      loginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
> ++    } catch (LoginException le) {
> ++      if (start > 0) {
> ++        metrics.loginFailure.add(Time.now() - start);
> ++      }
> ++      throw new IOException("Login failure for " + le, le);
> ++    }
> ++    LOG.info("Login successful for user " + loginUser.getUserName());
> ++  }
> ++
> +   /**
> +    * Log a user in from a keytab file. Loads a user identity from a
> keytab
> +    * file and logs them in. They become the currently logged-in user.
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/supports/hbase/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/hbase/README.md b/has/supports/hbase/README.md
> new file mode 100644
> index 0000000..d55a35c
> --- /dev/null
> +++ b/has/supports/hbase/README.md
> @@ -0,0 +1,154 @@
> +Enable HBase
> +===============
> +
> +## 1. Apply the patch to hadoop-2.5.1 source code
> +```
> +git apply hbase-1.1.10-hadoop-2.5.1.patch
> +```
> +
> +## 2. Build
> +```
> +mvn clean package -DskipTests
> +```
> +
> +## 3. Copy the hadoop-auth jar and hadoop-common jar to hbase lib
> +```
> +cp hadoop/hadoop-common-project/hadoop-auth/target/hadoop-auth-2.5.1.jar
> $HBASE_HOME/lib/
> +cp hadoop/hadoop-common-project/hadoop-common/target/hadoop-common-2.5.1.jar
> $HBASE_HOME/lib/
> +```
> +
> +## 4. Update hbase security configuration
> +
> +### Update conf/hbase-site.xml
> +```
> +<property>
> +  <name>hbase.security.authentication</name>
> +  <value>kerberos</value>
> +</property>
> +
> +<property>
> +  <name>hbase.rpc.engine</name>
> +  <value>org.apache.hadoop.hbase.ipc.SecureRpcEngine</value>
> +</property>
> +
> +<property>
> +  <name>hbase.regionserver.kerberos.principal</name>
> +  <value>hbase/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<property>
> +  <name>hbase.regionserver.keytab.file</name>
> +  <value>/path/to/hbase.keytab</value>
> +</property>
> +
> +<property>
> +  <name>hbase.master.kerberos.principal</name>
> +  <value>hbase/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<property>
> +  <name>hbase.master.keytab.file</name>
> +  <value>/path/to/hbase.keytab</value>
> +</property>
> +```
> +
> +### Update /etc/hbase/conf/zk-jaas.conf
> +```
> +Client {
> +      com.sun.security.auth.module.Krb5LoginModule required
> +      useKeyTab=true
> +      keyTab="/path/to/hbase.keytab"
> +      storeKey=true
> +      useTicketCache=false
> +      principal="hbase/_HOST@HADOOP.COM";
> +};
> +```
> +
> +> Note "_HOST" should be replaced with the specific hostname.
> +
> +### Update conf/hbase-env.sh
> +```
> +export HBASE_OPTS="$HBASE_OPTS -Djava.security.auth.login.
> config=/etc/hbase/conf/zk-jaas.conf"
> +export HBASE_MANAGES_ZK=false
> +```
> +
> +### Update conf/hbase-site.xml on each HBase server host
> +```
> +<configuration>
> +  <property>
> +    <name>hbase.zookeeper.quorum</name>
> +    <value>$ZK_NODES</value>
> +  </property>
> +
> +  <property>
> +    <name>hbase.cluster.distributed</name>
> +    <value>true</value>
> +  </property>
> +</configuration>
> +```
> +
> +## 5. Update hadoop configuration to support JSVC instead of SASL
> +
> +### Install jsvc on each host of the hadoop cluster
> +```
> +sudo apt-get install jsvc
> +```
> +
> +> Download commons-daemon-xxx.jar from http://archive.apache.org/
> dist/commons/daemon/binaries/
> +
> +```
> +export CLASSPATH=$CLASSPATH:/path/to/commons-daemon-xxx.jar
> +```
> +
> +### Update hadoop/etc/hadoop/hadoop-env.sh
> +```
> +export HADOOP_SECURE_DN_USER=root
> +export HADOOP_SECURE_DN_PID_DIR=$HADOOP_HOME/$DN_USER/pids
> +export HADOOP_SECURE_DN_LOG_DIR=$HADOOP_HOME/$DN_USER/logs
> +
> +export JSVC_HOME=/usr/bin
> +```
> +
> +### Disable https in hadoop/etc/hadoop/hdfs-site.xml
> +
> +***REMOVE*** the following configuration:
> +```
> +<!-- HTTPS config -->
> +<property>
> +  <name>dfs.http.policy</name>
> +  <value>HTTPS_ONLY</value>
> +</property>
> +<property>
> +  <name>dfs.data.transfer.protection</name>
> +  <value>integrity</value>
> +</property>
> +```
> +
> +### Update hadoop/etc/hadoop/hdfs-site.xml
> +```
> +<property>
> +    <name>dfs.datanode.address</name>
> +    <value>0.0.0.0:1004</value>
> +</property>
> +<property>
> +    <name>dfs.datanode.http.address</name>
> +    <value>0.0.0.0:1006</value>
> +</property>
> +```
> +
> +> The datanode ports must be privileged ports (below 1024).
> +
> +## 6. Start hbase
> +
> +### Restart namenode and datanode in jsvc
> +```
> +sbin/stop-dfs.sh                      # stop hdfs first
> +
> +sbin/hadoop-daemon.sh start namenode  # start namenode
> +sbin/start-secure-dns.sh              # start datanode via jsvc
> +```
> +
> +### Start hbase
> +```
> +bin/start-hbase.sh
> +```
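> +
> +To verify the secure setup, obtain a ticket and query the cluster
> status from the HBase shell (principal and keytab path are
> illustrative):
> +```
> +kinit -kt /path/to/hbase.keytab hbase/<hostname>@HADOOP.COM
> +echo "status" | bin/hbase shell
> +```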
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
> ----------------------------------------------------------------------
> diff --git a/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
> b/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
> new file mode 100644
> index 0000000..bef04b4
> --- /dev/null
> +++ b/has/supports/hbase/hbase-1.1.10-hadoop-2.5.1.patch
> @@ -0,0 +1,136 @@
> +diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/
> apache/hadoop/security/authentication/util/KerberosUtil.java
> b/hadoop-common-project/hadoop-auth/src/main/java/org/
> apache/hadoop/security/authentication/util/KerberosUtil.java
> +index ca0fce2..b43476d 100644
> +--- a/hadoop-common-project/hadoop-auth/src/main/java/org/
> apache/hadoop/security/authentication/util/KerberosUtil.java
> ++++ b/hadoop-common-project/hadoop-auth/src/main/java/org/
> apache/hadoop/security/authentication/util/KerberosUtil.java
> +@@ -44,7 +44,8 @@
> +   public static String getKrb5LoginModuleName() {
> +     return System.getProperty("java.vendor").contains("IBM")
> +       ? "com.ibm.security.auth.module.Krb5LoginModule"
> +-      : "com.sun.security.auth.module.Krb5LoginModule";
> ++//      : "com.sun.security.auth.module.Krb5LoginModule";
> ++      :"org.apache.hadoop.has.client.HasLoginModule";
> +   }
> +
> +   public static Oid getOidInstance(String oidName)
> +diff --git a/hadoop-common-project/hadoop-common/src/main/java/
> org/apache/hadoop/security/UserGroupInformation.java
> b/hadoop-common-project/hadoop-common/src/main/java/
> org/apache/hadoop/security/UserGroupInformation.java
> +index 4f117fd..7a8fc43 100644
> +--- a/hadoop-common-project/hadoop-common/src/main/java/
> org/apache/hadoop/security/UserGroupInformation.java
> ++++ b/hadoop-common-project/hadoop-common/src/main/java/
> org/apache/hadoop/security/UserGroupInformation.java
> +@@ -88,8 +88,10 @@
> +   private static final float TICKET_RENEW_WINDOW = 0.80f;
> +   static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
> +   static final String HADOOP_PROXY_USER = "HADOOP_PROXY_USER";
> +-
> +-  /**
> ++  public static final String HADOOP_SECURITY_AUTHENTICATION_USE_HAS
> ++    = "hadoop.security.authentication.use.has";
> ++
> ++  /**
> +    * UgiMetrics maintains UGI activity statistics
> +    * and publishes them through the metrics interfaces.
> +    */
> +@@ -434,6 +436,8 @@ public String toString() {
> +       "hadoop-user-kerberos";
> +     private static final String KEYTAB_KERBEROS_CONFIG_NAME =
> +       "hadoop-keytab-kerberos";
> ++     private static final String HAS_KERBEROS_CONFIG_NAME =
> ++      "hadoop-has-kerberos";
> +
> +     private static final Map<String, String> BASIC_JAAS_OPTIONS =
> +       new HashMap<String,String>();
> +@@ -490,6 +494,29 @@ public String toString() {
> +       KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
> +       KEYTAB_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
> +     }
> ++
> ++    private static final Map<String, String> HAS_KERBEROS_OPTIONS =
> ++        new HashMap<String, String>();
> ++
> ++    static {
> ++      if (IBM_JAVA) {
> ++        HAS_KERBEROS_OPTIONS.put("useDefaultCcache", "true");
> ++      } else {
> ++        HAS_KERBEROS_OPTIONS.put("doNotPrompt", "true");
> ++        HAS_KERBEROS_OPTIONS.put("useTgtTicket", "true");
> ++        HAS_KERBEROS_OPTIONS.put("hadoopSecurityHas",
> conf.get("hadoop.security.has"));
> ++      }
> ++      HAS_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
> ++    }
> ++
> ++    private static final AppConfigurationEntry HAS_KERBEROS_LOGIN =
> ++      new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
> ++                                LoginModuleControlFlag.OPTIONAL,
> ++                                HAS_KERBEROS_OPTIONS);
> ++    private static final AppConfigurationEntry[] HAS_KERBEROS_CONF =
> ++      new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, HAS_KERBEROS_LOGIN,
> ++                                  HADOOP_LOGIN};
> ++
> +     private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN =
> +       new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
> +                                 LoginModuleControlFlag.REQUIRED,
> +@@ -520,11 +547,45 @@ public String toString() {
> +         }
> +         KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
> +         return KEYTAB_KERBEROS_CONF;
> ++      } else if(HAS_KERBEROS_CONFIG_NAME.equals(appName)) {
> ++        return HAS_KERBEROS_CONF;
> +       }
> +       return null;
> +     }
> +   }
> +
> ++  /**
> ++   * Log a user in from a tgt ticket.
> ++   * @throws IOException
> ++   */
> ++  @InterfaceAudience.Public
> ++  @InterfaceStability.Evolving
> ++  public synchronized
> ++  static void loginUserFromHas() throws IOException {
> ++    if (!isSecurityEnabled())
> ++      return;
> ++
> ++    Subject subject = new Subject();
> ++    LoginContext login;
> ++    long start = 0;
> ++    try {
> ++      login = newLoginContext(HadoopConfiguration.HAS_
> KERBEROS_CONFIG_NAME,
> ++            subject, new HadoopConfiguration());
> ++      start = Time.now();
> ++      login.login();
> ++      metrics.loginSuccess.add(Time.now() - start);
> ++      loginUser = new UserGroupInformation(subject);
> ++      loginUser.setLogin(login);
> ++      loginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
> ++    } catch (LoginException le) {
> ++      if (start > 0) {
> ++        metrics.loginFailure.add(Time.now() - start);
> ++      }
> ++      throw new IOException("Login failure for " + le, le);
> ++    }
> ++    LOG.info("Login successful for user " + loginUser.getUserName());
> ++  }
> ++
> +   private static String prependFileAuthority(String keytabPath) {
> +     return keytabPath.startsWith("file://") ? keytabPath
> +         : "file://" + keytabPath;
> +@@ -751,9 +812,16 @@ static void loginUserFromSubject(Subject subject)
> throws IOException {
> +       if (subject == null) {
> +         subject = new Subject();
> +       }
> +-      LoginContext login =
> +-          newLoginContext(authenticationMethod.getLoginAppName(),
> +-                          subject, new HadoopConfiguration());
> ++      LoginContext login = null;
> ++      if (authenticationMethod.equals(AuthenticationMethod.KERBEROS)
> ++        && conf.getBoolean(HADOOP_SECURITY_AUTHENTICATION_USE_HAS,
> false)) {
> ++        login = newLoginContext(HadoopConfiguration.HAS_
> KERBEROS_CONFIG_NAME,
> ++          subject, new HadoopConfiguration());
> ++      } else {
> ++        login = newLoginContext(authenticationMethod.getLoginAppName(),
> ++          subject, new HadoopConfiguration());
> ++      }
> ++
> +       login.login();
> +       UserGroupInformation realUser = new UserGroupInformation(subject);
> +       realUser.setLogin(login);
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/supports/hive/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/hive/README.md b/has/supports/hive/README.md
> new file mode 100644
> index 0000000..2fa1195
> --- /dev/null
> +++ b/has/supports/hive/README.md
> @@ -0,0 +1,55 @@
> +Enable Hive
> +==============
> +
> +## Hive on HDFS
> +
> +### 1. Enabling Kerberos Authentication for HiveServer2
> +> Update hive-site.xml
> +```
> +<property>
> +  <name>hive.server2.authentication</name>
> +  <value>KERBEROS</value>
> +</property>
> +<property>
> +  <name>hive.server2.authentication.kerberos.principal</name>
> +  <value>hive/_HOST@HADOOP.COM</value>
> +</property>
> +<property>
> +  <name>hive.server2.authentication.kerberos.keytab</name>
> +  <value>/path/to/hive.keytab</value>
> +</property>
> +```
> +
> +### 2. Enable impersonation in HiveServer2
> +> Update hive-site.xml
> +```
> +<property>
> +  <name>hive.server2.enable.impersonation</name>
> +  <description>Enable user impersonation for HiveServer2</description>
> +  <value>true</value>
> +</property>
> +```
> +
> +> Update core-site.xml of hadoop
> +```
> +<property>
> +  <name>hadoop.proxyuser.hive.hosts</name>
> +  <value>*</value>
> +</property>
> +<property>
> +  <name>hadoop.proxyuser.hive.groups</name>
> +  <value>*</value>
> +</property>
> +```
> +
> +### 3. Start Hive
> +> Start the services
> +```
> +hive --service metastore &
> +hive --service hiveserver2 &
> +```
> +
> +> Start the hive shell
> +```
> +hive
> +```
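> +
> +A remote client can then connect to the Kerberized HiveServer2 with
> beeline (the JDBC URL below is illustrative; obtain a ticket with
> kinit first):
> +```
> +beeline -u "jdbc:hive2://<host>:10000/default;principal=hive/_HOST@HADOOP.COM"
> +```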
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/supports/oozie/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/oozie/README.md b/has/supports/oozie/README.md
> new file mode 100644
> index 0000000..4760f97
> --- /dev/null
> +++ b/has/supports/oozie/README.md
> @@ -0,0 +1,105 @@
> +Enable Oozie
> +===============
> +
> +## 1. Update oozie-site.xml
> +Add the following properties:
> +```
> +<property>
> +  <name>oozie.service.AuthorizationService.security.enabled</name>
> +  <value>true</value>
> +  <description>Specifies whether security (user name/admin role) is
> enabled or not.
> +   If it is disabled any user can manage the Oozie system and manage any
> job.</description>
> +</property>
> +
> +<property>
> +  <name>oozie.service.HadoopAccessorService.kerberos.enabled</name>
> +  <value>true</value>
> +</property>
> +
> +<property>
> +  <name>local.realm</name>
> +  <value>HADOOP.COM</value>
> +  <description>HAS Realm.</description>
> +</property>
> +
> +<property>
> +  <name>oozie.service.HadoopAccessorService.keytab.file</name>
> +  <value>/etc/oozie/conf/oozie.keytab</value>
> +  <description>The keytab of the Oozie service.</description>
> +</property>
> +
> +<property>
> +  <name>oozie.service.HadoopAccessorService.kerberos.principal</name>
> +  <value>oozie/_HOST@HADOOP.COM</value>
> +  <description>Principal of Oozie service.</description>
> +</property>
> +
> +<property>
> +  <name>oozie.authentication.kerberos.principal</name>
> +  <value>HTTP/_HOST@HADOOP.COM</value>
> +  <description>Must use the hostname of the Oozie Server.</description>
> +</property>
> +
> +<property>
> +  <name>oozie.authentication.kerberos.keytab</name>
> +  <value>/etc/hadoop/conf/hdfs.keytab</value>
> +  <description>Location of the hdfs keytab file which contains the HTTP
> principal.</description>
> +</property>
> +
> +<property>
> +  <name>oozie.authentication.type</name>
> +  <value>kerberos</value>
> +  <description></description>
> +</property>
> +
> +<property>
> +  <name>oozie.authentication.kerberos.name.rules</name>
> +  <value>DEFAULT</value>
> +  <description>The mapping from principal names to local service user
> names.</description>
> +</property>
> +```
> +
> +> Note "_HOST" should be replaced with the specific hostname.
> +
> +## 2. Start oozie
> +```
> +bin/oozied.sh start
> +```
> +
> +## 3. Use kinit to get a credential cache
> +
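> +For example (principal and keytab path are placeholders):
> +```
> +kinit -kt /path/to/user.keytab user@HADOOP.COM
> +```
> +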
> +## 4. Use the Oozie command line tool to check the status of Oozie:
> +```
> +bin/oozie.sh admin -oozie http://<host>:11000/oozie -status
> +```
> +
> +Expected output:
> +```
> +System mode: NORMAL
> +```
> +
> +## 5. Use curl to check the status of Oozie:
> +```
> +curl -i --negotiate -u : "http://<host>:11000/oozie/v1/admin/status"
> +```
> +
> +Expected output:
> +```
> +HTTP/1.1 401 Unauthorized
> +Server: Apache-Coyote/1.1
> +WWW-Authenticate: Negotiate
> +Set-Cookie: hadoop.auth=; Path=/; Expires=Thu, 01-Jan-1970 00:00:00 GMT;
> HttpOnly
> +Content-Type: text/html;charset=utf-8
> +Content-Length: 997
> +Date: Wed, 28 Jun 2017 03:45:28 GMT
> +
> +HTTP/1.1 200 OK
> +Server: Apache-Coyote/1.1
> +WWW-Authenticate: Negotiate YGoGCSqGSIb3EgECAgIAb1swWaADAgEFoQMCAQ+
> iTTBLoAMCARGiRARCzCqLa8uqKUk6UlJfN02KC79DDFpStTBieqHBfhYEm6S
> 1GyrP29Sr3hC4lYl4U42NFSwTb/ySjqu3EpOhBJo5Bg4h
> +Set-Cookie: hadoop.auth="u=oozie&p=oozie/_HOST@EXAMPLE.COM&t=kerberos&e=
> 1498657528799&s=waJ0DZ80kcA2Gc9pYMNIGsIAC5Y="; Path=/; Expires=Wed,
> 28-Jun-2017 13:45:28 GMT; HttpOnly
> +Content-Type: application/json;charset=UTF-8
> +Content-Length: 23
> +Date: Wed, 28 Jun 2017 03:45:28 GMT
> +
> +{"systemMode":"NORMAL"}
> +```
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/supports/phoenix/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/phoenix/README.md b/has/supports/phoenix/README.
> md
> new file mode 100644
> index 0000000..05755fb
> --- /dev/null
> +++ b/has/supports/phoenix/README.md
> @@ -0,0 +1,30 @@
> +Enable Phoenix
> +=================
> +
> +## 1. Use sqlline to connect to secure HBase
> +```
> +sqlline.py <zk_quorum>:<zk_port>:<zk_hbase_path>:<principal>:<
> keytab_file>
> +# An example:
> +sqlline.py localhost:2181:/hbase:hbase/localhost@EXAMPLE.COM:/home/
> hadoop/keytab/hbase.keytab
> +```
> +
> +## 2. Configure the Phoenix query server
> +
> +### Update hbase-site.xml
> +Add the following properties:
> +```
> +<property>
> +    <name>phoenix.queryserver.kerberos.principal</name>
> +    <value>hbase/_HOST@HADOOP.COM</value>
> +</property>
> +
> +<property>
> +    <name>phoenix.queryserver.keytab.file</name>
> +    <value>/home/hadoop/keytab/hbase.keytab</value>
> +</property>
> +```
> +
> +### Start phoenix query server
> +```
> +queryserver.py start
> +```
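> +
> +A thin client can then connect through the query server (host and
> port below are illustrative; 8765 is the default):
> +```
> +sqlline-thin.py http://localhost:8765
> +```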
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/supports/presto/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/presto/README.md b/has/supports/presto/README.md
> new file mode 100644
> index 0000000..244efe6
> --- /dev/null
> +++ b/has/supports/presto/README.md
> @@ -0,0 +1,24 @@
> +Enable Presto
> +================
> +
> +## 1. Hive Security Configuration
> +Update catalog/hive.properties and add the following properties:
> +```
> +# Config to connect to the Kerberized Hive metastore
> +hive.metastore.authentication.type=KERBEROS
> +hive.metastore.service.principal=hbase/_HOST@HADOOP.COM
> +hive.metastore.client.principal=hbase/_HOST@HADOOP.COM
> +hive.metastore.client.keytab=/path/to/hbase.keytab
> +
> +# Config to connect to Kerberized HDFS
> +hive.hdfs.authentication.type=KERBEROS
> +hive.hdfs.presto.principal=hbase/_HOST@HADOOP.COM
> +hive.hdfs.presto.keytab=/path/to/hbase.keytab
> +```
> +
> +> Note "_HOST" should be replaced with the specific hostname.
> +
> +## 2. Restart presto server
> +```
> +bin/launcher restart
> +```
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/supports/spark/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/spark/README.md b/has/supports/spark/README.md
> new file mode 100644
> index 0000000..f08ce50
> --- /dev/null
> +++ b/has/supports/spark/README.md
> @@ -0,0 +1,26 @@
> +Enable Spark
> +===============
> +
> +## 1. Update spark-env.sh
> +```
> +SPARK_HISTORY_OPTS="-Dspark.history.kerberos.enabled=true \
> +-Dspark.history.kerberos.principal=<sp...@HADOOP.COM> \
> +-Dspark.history.kerberos.keytab=<keytab>"
> +```
> +
> +> Note "_HOST" should be replaced with the specific hostname.
> +
> +## 2. Spark-submit job
> +> Only YARN mode is supported
> +```
> +bin/spark-submit \
> +  --keytab <keytab> \
> +  --principal <sp...@HADOOP.COM> \
> +  --class <main-class> \
> +  --master <master-url> \
> +  --deploy-mode <deploy-mode> \
> +  --conf <key>=<value> \
> +  ... # other options
> +  <application-jar> \
> +  <application-arguments>
> +```
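> +
> +With the spark-v2.1.1.patch below applied, spark-submit also accepts a
> --use-has flag that logs in through HAS instead of a keytab; an
> illustrative invocation:
> +```
> +bin/spark-submit --use-has --class <main-class> --master <master-url> \
> +  <application-jar> <application-arguments>
> +```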
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/supports/spark/spark-v2.1.1.patch
> ----------------------------------------------------------------------
> diff --git a/has/supports/spark/spark-v2.1.1.patch
> b/has/supports/spark/spark-v2.1.1.patch
> new file mode 100644
> index 0000000..c7e40b7
> --- /dev/null
> +++ b/has/supports/spark/spark-v2.1.1.patch
> @@ -0,0 +1,51 @@
> +diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
> b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
> +index 443f1f5..1fc66f0 100644
> +--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
> ++++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
> +@@ -553,7 +553,9 @@ object SparkSubmit {
> +
> +     // assure a keytab is available from any place in a JVM
> +     if (clusterManager == YARN || clusterManager == LOCAL) {
> +-      if (args.principal != null) {
> ++      if (args.useHas) {
> ++        UserGroupInformation.loginUserFromHas()
> ++      } else if (args.principal != null) {
> +         require(args.keytab != null, "Keytab must be specified when
> principal is specified")
> +         if (!new File(args.keytab).exists()) {
> +           throw new SparkException(s"Keytab file: ${args.keytab} does
> not exist")
> +diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
> b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
> +index f1761e7..5e48419 100644
> +--- a/core/src/main/scala/org/apache/spark/deploy/
> SparkSubmitArguments.scala
> ++++ b/core/src/main/scala/org/apache/spark/deploy/
> SparkSubmitArguments.scala
> +@@ -78,6 +78,8 @@ private[deploy] class SparkSubmitArguments(args:
> Seq[String], env: Map[String, S
> +   var submissionToRequestStatusFor: String = null
> +   var useRest: Boolean = true // used internally
> +
> ++  var useHas: Boolean = false
> ++
> +   /** Default properties present in the currently defined defaults file.
> */
> +   lazy val defaultSparkProperties: HashMap[String, String] = {
> +     val defaultProperties = new HashMap[String, String]()
> +@@ -438,6 +440,9 @@ private[deploy] class SparkSubmitArguments(args:
> Seq[String], env: Map[String, S
> +       case USAGE_ERROR =>
> +         printUsageAndExit(1)
> +
> ++      case USE_HAS =>
> ++        useHas = true
> ++
> +       case _ =>
> +         throw new IllegalArgumentException(s"Unexpected argument
> '$opt'.")
> +     }
> +diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
> b/launcher/src/main/java/org/apache/spark/launcher/
> SparkSubmitOptionParser.java
> +index 6767cc5..49a7678 100644
> +--- a/launcher/src/main/java/org/apache/spark/launcher/
> SparkSubmitOptionParser.java
> ++++ b/launcher/src/main/java/org/apache/spark/launcher/
> SparkSubmitOptionParser.java
> +@@ -76,6 +76,8 @@ class SparkSubmitOptionParser {
> +   protected final String PRINCIPAL = "--principal";
> +   protected final String QUEUE = "--queue";
> +
> ++  protected final String USE_HAS = "--use-has";
> ++
> +   /**
> +    * This is the canonical list of spark-submit options. Each entry in
> the array contains the
> +    * different aliases for the same option; the first element of each
> entry is the "official"
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/supports/thrift/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/thrift/README.md b/has/supports/thrift/README.md
> new file mode 100644
> index 0000000..db49d38
> --- /dev/null
> +++ b/has/supports/thrift/README.md
> @@ -0,0 +1,70 @@
> +Enable Thrift
> +================
> +
> +## 1. Enable HBase thrift2 server
> +
> +### Update hbase-site.xml
> +Add the following properties:
> +```
> +<property>
> +  <name>hbase.thrift.keytab.file</name>
> +  <value>/etc/hbase/conf/hbase.keytab</value>
> +</property>
> +<property>
> +  <name>hbase.thrift.kerberos.principal</name>
> +  <value>hbase/_HOST@HADOOP.COM</value>
> +</property>
> +```
> +
> +### Restart HBase
> +
> +### Start thrift server
> +```
> +hbase thrift2 start
> +```
> +
> +## 2. Write thrift client application
> +Use a keytab file to connect to the thrift server.
> +An example of a thrift client:
> +```Java
> +package com.example.thrifttest;
> +
> +import org.apache.hadoop.hbase.thrift.generated.Hbase;
> +import org.apache.hadoop.security.UserGroupInformation;
> +import org.apache.thrift.TException;
> +import org.apache.thrift.protocol.TBinaryProtocol;
> +import org.apache.thrift.protocol.TProtocol;
> +import org.apache.thrift.transport.TSocket;
> +import org.apache.thrift.transport.TTransport;
> +import org.apache.thrift.transport.TTransportException;
> +import java.io.IOException;
> +
> +public class Thrifttest {
> +    static {
> +        final String principal = "hbase/hostname@HADOOP.COM";
> +        final String keyTab = "/etc/hbase/conf/hbase.keytab";
> +        try {
> +            // Log in from the keytab before making any Thrift calls
> +            UserGroupInformation.loginUserFromKeytab(principal, keyTab);
> +        } catch (IOException e) {
> +            e.printStackTrace();
> +        }
> +    }
> +
> +    private void start()  {
> +        try {
> +            TTransport socket = new TSocket("192.168.x.xxx", 9090);
> +            socket.open();
> +            TProtocol protocol = new TBinaryProtocol(socket, true, true);
> +            Hbase.Client client = new Hbase.Client(protocol);
> +        } catch (TTransportException e) {
> +            e.printStackTrace();
> +        } catch (TException e) {
> +            e.printStackTrace();
> +        }
> +    }
> +
> +    public static void main(String[] args) {
> +        Thrifttest c = new Thrifttest();
> +        c.start();
> +    }
> +}
> +```
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/supports/zookeeper/README.md
> ----------------------------------------------------------------------
> diff --git a/has/supports/zookeeper/README.md b/has/supports/zookeeper/
> README.md
> new file mode 100644
> index 0000000..edc7a0e
> --- /dev/null
> +++ b/has/supports/zookeeper/README.md
> @@ -0,0 +1,59 @@
> +Enable ZooKeeper
> +===================
> +
> +## 1. Create the dependency jars
> +```
> +cd HAS/supports/zookeeper
> +mvn clean package
> +```
> +
> +## 2. Copy the jars to ZooKeeper lib directory
> +```
> +cp HAS/supports/zookeeper/lib/* $ZOOKEEPER_HOME/lib/
> +```
> +
> +## 3. Copy the conf file to ZooKeeper conf directory
> +```
> +cp HAS/supports/zookeeper/conf/* $ZOOKEEPER_HOME/conf/
> +```
> +
> +## 4. Update ZooKeeper security configuration files
> +> Update $ZOO_CONF_DIR/jaas.conf
> +> Replace "_HOST" with the specific hostname for each host
> +```
> +Server {
> +  com.sun.security.auth.module.Krb5LoginModule required
> +  useKeyTab=true
> +  keyTab="/path/to/zookeeper.keytab"
> +  storeKey=true
> +  useTicketCache=true
> +  principal="zookeeper/_HOST@HADOOP.COM";
> +};
> +
> +Client {
> +  com.sun.security.auth.module.Krb5LoginModule required
> +  useKeyTab=true
> +  keyTab="/home/hdfs/keytab/hbase.keytab"
> +  storeKey=true
> +  useTicketCache=false
> +  principal="zookeeper/_HOST@HADOOP.COM";
> +};
> +```
> +
> +> Update conf/zoo.cfg
> +```
> +authProvider.1=org.apache.zookeeper.server.auth.
> SASLAuthenticationProvider
> +jaasLoginRenew=3600000
> +kerberos.removeHostFromPrincipal=true
> +kerberos.removeRealmFromPrincipal=true
> +```
> +
> +## 5. Verify the configuration
> +```
> +zkCli.sh -server hostname:port
> +create /znode1 data sasl:zookeeper:cdwra
> +getAcl /znode1
> +```
> +
> +> The output of getAcl should show that the proper scheme and
> permissions were applied to the znode.
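> +
> +Illustrative getAcl output (exact formatting may vary by ZooKeeper
> version):
> +```
> +'sasl,'zookeeper
> +: cdwra
> +```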
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/supports/zookeeper/conf/jaas.conf
> ----------------------------------------------------------------------
> diff --git a/has/supports/zookeeper/conf/jaas.conf
> b/has/supports/zookeeper/conf/jaas.conf
> new file mode 100644
> index 0000000..62db69a
> --- /dev/null
> +++ b/has/supports/zookeeper/conf/jaas.conf
> @@ -0,0 +1,13 @@
> + Server {
> +      com.sun.security.auth.module.Krb5LoginModule required
> +      useKeyTab=true
> +      keyTab="/etc/zookeeper/zookeeper.keytab"
> +      storeKey=true
> +      useTicketCache=true
> +      principal="zookeeper/localhost@HADOOP.COM";
> +  };
> +
> +Client {
> +  org.apache.hadoop.has.client.HasLoginModule required
> +  useTgtTicket=true;
> +};
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/supports/zookeeper/conf/java.env
> ----------------------------------------------------------------------
> diff --git a/has/supports/zookeeper/conf/java.env
> b/has/supports/zookeeper/conf/java.env
> new file mode 100644
> index 0000000..bb7098b
> --- /dev/null
> +++ b/has/supports/zookeeper/conf/java.env
> @@ -0,0 +1 @@
> +export JVMFLAGS="-Djava.security.auth.login.config=$ZOOKEEPER_
> HOME/conf/jaas.conf"
>
> http://git-wip-us.apache.org/repos/asf/directory-kerby/
> blob/be580566/has/supports/zookeeper/pom.xml
> ----------------------------------------------------------------------
> diff --git a/has/supports/zookeeper/pom.xml b/has/supports/zookeeper/pom.
> xml
> new file mode 100644
> index 0000000..d2cdc13
> --- /dev/null
> +++ b/has/supports/zookeeper/pom.xml
> @@ -0,0 +1,47 @@
> +<?xml version="1.0" encoding="UTF-8"?>
> +<project xmlns="http://maven.apache.org/POM/4.0.0"
> +         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
> +         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
> http://maven.apache.org/xsd/maven-4.0.0.xsd">
> +  <modelVersion>4.0.0</modelVersion>
> +
> +  <parent>
> +    <groupId>org.apache.hadoop</groupId>
> +    <artifactId>has-project</artifactId>
> +    <version>1.0.0-SNAPSHOT</version>
> +  </parent>
> +
> +  <artifactId>zookeeper-dist</artifactId>
> +  <description>ZooKeeper dist</description>
> +  <name>ZooKeeper dist</name>
> +
> +  <dependencies>
> +    <dependency>
> +      <groupId>org.apache.hadoop</groupId>
> +      <artifactId>has-client</artifactId>
> +      <version>${project.version}</version>
> +    </dependency>
> +  </dependencies>
> +
> +  <build>
> +    <plugins>
> +      <plugin>
> +        <groupId>org.apache.maven.plugins</groupId>
> +        <artifactId>maven-dependency-plugin</artifactId>
> +        <executions>
> +          <execution>
> +            <id>copy</id>
> +            <phase>package</phase>
> +            <goals>
> +              <goal>copy-dependencies</goal>
> +            </goals>
> +            <configuration>
> +              <outputDirectory>lib</outputDirectory>
> +            </configuration>
> +          </execution>
> +        </executions>
> +      </plugin>
> +    </plugins>
> +  </build>
> +
> +
> +</project>
>
>


-- 
Colm O hEigeartaigh

Talend Community Coder
http://coders.talend.com