Posted to commits@directory.apache.org by pl...@apache.org on 2018/03/27 05:42:14 UTC

directory-kerby git commit: DIRKRB-699 Enable Hadoop/HBase/Hive/Zookeeper.

Repository: directory-kerby
Updated Branches:
  refs/heads/trunk 3d8bcc590 -> 346c2fabb


DIRKRB-699 Enable Hadoop/HBase/Hive/Zookeeper.


Project: http://git-wip-us.apache.org/repos/asf/directory-kerby/repo
Commit: http://git-wip-us.apache.org/repos/asf/directory-kerby/commit/346c2fab
Tree: http://git-wip-us.apache.org/repos/asf/directory-kerby/tree/346c2fab
Diff: http://git-wip-us.apache.org/repos/asf/directory-kerby/diff/346c2fab

Branch: refs/heads/trunk
Commit: 346c2fabbf0630a9dd712a9f07b307b958b5f71d
Parents: 3d8bcc5
Author: plusplusjiajia <ji...@intel.com>
Authored: Tue Mar 27 13:39:06 2018 +0800
Committer: plusplusjiajia <ji...@intel.com>
Committed: Tue Mar 27 13:39:06 2018 +0800

----------------------------------------------------------------------
 has-project/supports/hadoop/README.md           | 333 +++++++++++++++++++
 has-project/supports/hadoop/hadoop-2.7.2.patch  | 152 +++++++++
 has-project/supports/hbase/README.md            | 156 +++++++++
 .../supports/hbase/hbase-hadoop-2.5.1.patch     | 136 ++++++++
 has-project/supports/hive/README.md             |  55 +++
 has-project/supports/zookeeper/README.md        |  59 ++++
 has-project/supports/zookeeper/conf/jaas.conf   |  13 +
 has-project/supports/zookeeper/conf/java.env    |   1 +
 has-project/supports/zookeeper/pom.xml          |  47 +++
 9 files changed, 952 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/346c2fab/has-project/supports/hadoop/README.md
----------------------------------------------------------------------
diff --git a/has-project/supports/hadoop/README.md b/has-project/supports/hadoop/README.md
new file mode 100644
index 0000000..4e79388
--- /dev/null
+++ b/has-project/supports/hadoop/README.md
@@ -0,0 +1,333 @@
+Enable Hadoop
+================
+
+## 1. Build Hadoop
+
+### Apply the patch to hadoop-2.7.2 source code
+```
+git apply hadoop-2.7.2.patch
+```
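+
+Note: the command assumes the patch file sits in the root of the Hadoop source tree; spelled out with illustrative paths:
+```
+cd /path/to/hadoop-2.7.2-src
+git apply /path/to/directory-kerby/has-project/supports/hadoop/hadoop-2.7.2.patch
+```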
+
+### Build Hadoop
+```
+mvn package -Pdist,native -Dtar -DskipTests -Dmaven.javadoc.skip=true -Dcontainer-executor.conf.dir=/etc/hadoop/conf
+```
+
+### Redeploy Hadoop
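+
+The patched distribution can then be pushed out to the cluster. A minimal sketch, assuming the build above produced hadoop-dist/target/hadoop-2.7.2.tar.gz and a hypothetical hosts.txt listing the cluster nodes:
+```
+# Push the rebuilt distribution to every node and unpack it
+for host in $(cat hosts.txt); do
+  scp hadoop-dist/target/hadoop-2.7.2.tar.gz "$host":/opt/
+  ssh "$host" "tar -xzf /opt/hadoop-2.7.2.tar.gz -C /opt/"
+done
+```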
+
+## 2. Distribute and configure keytab files
+
+### Create keytab and deploy krb5.conf and has-client.conf
+
+### Distribute keytab files to the corresponding nodes
+
+### Set permission of keytab files
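+
+A minimal sketch of the distribution and permission steps, assuming the keytabs were already created on the HAS server and the service users exist on each node; hosts and paths are illustrative:
+```
+# Copy each keytab to the node that runs the corresponding service
+scp hdfs.keytab root@datanode1:/etc/hadoop/conf/
+
+# A keytab must be readable only by the service user that owns it
+ssh root@datanode1 "chown hdfs:hadoop /etc/hadoop/conf/hdfs.keytab && chmod 400 /etc/hadoop/conf/hdfs.keytab"
+```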
+
+## 3. Update Hadoop configuration files
+ 
+### Update core-site.xml
+Add the following properties:
+```
+<property>
+  <name>hadoop.security.authorization</name>
+  <value>true</value>
+</property>
+<property>
+  <name>hadoop.security.authentication</name>
+  <value>kerberos</value>
+</property>
+<property>
+   <name>hadoop.security.authentication.use.has</name>
+   <value>true</value>
+</property>
+```
+
+### Update hdfs-site.xml
+Add the following properties:
+```
+<!-- General HDFS security config -->
+<property>
+  <name>dfs.block.access.token.enable</name>
+  <value>true</value>
+</property>
+
+<!-- NameNode security config -->
+<property>
+  <name>dfs.namenode.keytab.file</name>
+  <value>/etc/hadoop/conf/hdfs.keytab</value>
+</property>
+<property>
+  <name>dfs.namenode.kerberos.principal</name>
+  <value>hdfs/_HOST@HADOOP.COM</value>
+</property>
+<property>
+  <name>dfs.namenode.kerberos.internal.spnego.principal</name>
+  <value>HTTP/_HOST@HADOOP.COM</value>
+</property>
+<property>
+  <name>dfs.namenode.delegation.token.max-lifetime</name>
+  <value>604800000</value>
+  <description>The maximum lifetime in milliseconds for which a delegation token is valid.</description>
+</property>
+
+<!-- Secondary NameNode security config -->
+<property>
+  <name>dfs.secondary.namenode.keytab.file</name>
+  <value>/etc/hadoop/conf/hdfs.keytab</value>
+</property>
+<property>
+  <name>dfs.secondary.namenode.kerberos.principal</name>
+  <value>hdfs/_HOST@HADOOP.COM</value>
+</property>
+<property>
+  <name>dfs.secondary.namenode.kerberos.internal.spnego.principal</name>
+  <value>HTTP/_HOST@HADOOP.COM</value>
+</property>
+
+<!-- DataNode security config -->
+<property>
+  <name>dfs.datanode.data.dir.perm</name>
+  <value>700</value>
+</property>
+<property>
+  <name>dfs.datanode.keytab.file</name>
+  <value>/etc/hadoop/conf/hdfs.keytab</value>
+</property>
+<property>
+  <name>dfs.datanode.kerberos.principal</name>
+  <value>hdfs/_HOST@HADOOP.COM</value>
+</property>
+
+<!-- HTTPS config -->
+<property>
+  <name>dfs.http.policy</name>
+  <value>HTTPS_ONLY</value>
+</property>
+<property>
+  <name>dfs.data.transfer.protection</name>
+  <value>integrity</value>
+</property>
+<property>
+  <name>dfs.web.authentication.kerberos.keytab</name>
+  <value>/etc/hadoop/conf/hdfs.keytab</value>
+</property>
+<property>
+  <name>dfs.web.authentication.kerberos.principal</name>
+  <value>HTTP/_HOST@HADOOP.COM</value>
+</property>
+```
+
+### Configuration for HDFS HA
+
+> For the general HA configuration, please look at [HDFS High Availability](https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-hdfs/HDFSHighAvailabilityWithNFS.html)
+
+Add the following properties to hdfs-site.xml:
+```
+<property>
+  <name>dfs.journalnode.keytab.file</name>
+  <value>/etc/hadoop/conf/hdfs.keytab</value>
+</property>
+<property>
+  <name>dfs.journalnode.kerberos.principal</name>
+  <value>hdfs/_HOST@HADOOP.COM</value>
+</property>
+<property>
+  <name>dfs.journalnode.kerberos.internal.spnego.principal</name>
+  <value>HTTP/_HOST@HADOOP.COM</value>
+</property>
+```
+
+### Update yarn-site.xml
+Add the following properties:
+```
+<!-- ResourceManager security config -->
+<property>
+  <name>yarn.resourcemanager.keytab</name>
+  <value>/etc/hadoop/conf/yarn.keytab</value>
+</property>
+<property>
+  <name>yarn.resourcemanager.principal</name>
+  <value>yarn/_HOST@HADOOP.COM</value>
+</property>
+
+<!-- NodeManager security config -->
+<property>
+  <name>yarn.nodemanager.keytab</name>
+  <value>/etc/hadoop/conf/yarn.keytab</value>
+</property>
+<property>
+  <name>yarn.nodemanager.principal</name> 
+  <value>yarn/_HOST@HADOOP.COM</value>
+</property>
+
+<!-- HTTPS config -->
+<property>
+  <name>mapreduce.jobhistory.http.policy</name>
+  <value>HTTPS_ONLY</value>
+</property>
+
+<!-- Container executor config -->
+<property>
+  <name>yarn.nodemanager.container-executor.class</name>
+  <value>org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor</value>
+</property>
+<property>
+  <name>yarn.nodemanager.linux-container-executor.group</name>
+  <value>root</value>
+</property>
+
+<!-- Timeline service config, if timeline service enabled -->
+<property>
+  <name>yarn.timeline-service.principal</name>
+  <value>yarn/_HOST@HADOOP.COM</value>
+</property>
+
+<property>
+  <name>yarn.timeline-service.keytab</name>
+  <value>/etc/hadoop/conf/yarn.keytab</value>
+</property>
+
+<property>
+  <name>yarn.timeline-service.http-authentication.type</name>
+  <value>kerberos</value>
+</property>
+
+<property>
+  <name>yarn.timeline-service.http-authentication.kerberos.principal</name>
+  <value>HTTP/_HOST@HADOOP.COM</value>
+</property>
+
+<property>
+  <name>yarn.timeline-service.http-authentication.kerberos.keytab</name>
+  <value>/etc/hadoop/conf/hdfs.keytab</value>
+</property>
+
+<!-- Proxy server config, if web proxy server enabled -->
+<property>
+  <name>yarn.web-proxy.keytab</name>
+  <value>/etc/hadoop/conf/yarn.keytab</value>
+</property>
+
+<property>
+  <name>yarn.web-proxy.principal</name>
+  <value>yarn/_HOST@HADOOP.COM</value>
+</property>
+```
+
+### Update mapred-site.xml
+Add the following properties:
+```
+<!-- MapReduce security config -->
+<property>
+  <name>mapreduce.jobhistory.keytab</name>
+  <value>/etc/hadoop/conf/mapred.keytab</value>
+</property>
+<property>
+  <name>mapreduce.jobhistory.principal</name>
+  <value>mapred/_HOST@HADOOP.COM</value>
+</property>
+```
+
+### Create and configure ssl-server.xml
+```
+cd $HADOOP_HOME
+cp etc/hadoop/ssl-server.xml.example etc/hadoop/ssl-server.xml
+```
+
+Configure ssl-server.xml as described in [How to deploy https](https://github.com/apache/directory-kerby/blob/trunk/has-project/docs/deploy-https.md).
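+
+As an illustration, the keystore that ssl-server.xml points to can be created with keytool; the alias, paths, and passwords below are placeholders:
+```
+# Create a keystore for the HTTPS server on this host
+keytool -genkeypair -alias "$(hostname -f)" -keyalg RSA -keysize 2048 \
+  -dname "CN=$(hostname -f)" -keystore /etc/hadoop/conf/keystore.jks \
+  -storepass changeit -keypass changeit
+```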
+
+## 4. Configure container-executor
+
+### Create and configure container-executor.cfg
+
+Example of container-executor.cfg:
+```
+#configured value of yarn.nodemanager.linux-container-executor.group
+yarn.nodemanager.linux-container-executor.group=root
+#comma separated list of users who can not run applications
+banned.users=bin
+#Prevent other super-users
+min.user.id=0
+#comma separated list of system users who CAN run applications
+allowed.system.users=root,nobody,impala,hive,hdfs,yarn
+```
+
+Set permission:
+```
+mv container-executor.cfg /etc/hadoop/conf
+# container-executor.cfg should be read-only
+chmod 400 /etc/hadoop/conf/container-executor.cfg
+```
+
+### Set permission of container-executor
+```
+chmod 6050 container-executor
+# Test whether the configuration is correct
+container-executor --checksetup
+```
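+
+The binary must also be owned by root, with its group matching yarn.nodemanager.linux-container-executor.group; run the chown before the chmod, since changing ownership clears the setuid/setgid bits on most systems:
+```
+# Ownership: root, group from yarn.nodemanager.linux-container-executor.group
+# (root in this example); chown first, as it resets the setuid/setgid bits
+chown root:root container-executor
+chmod 6050 container-executor
+```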
+
+## 5. Set up cross-realm for DistCp
+
+### Set up cross-realm trust between realms
+Please look at [How to setup cross-realm](https://github.com/apache/directory-kerby/blob/trunk/has-project/docs/cross-realm.md).
+
+### Update core-site.xml
+
+Set the hadoop.security.auth_to_local parameter in both clusters by adding the following properties:
+```
+<!-- Set up cross realm between A.HADOOP.COM and B.HADOOP.COM -->
+<property>
+    <name>hadoop.security.auth_to_local</name>
+    <value> 
+        RULE:[1:$1@$0](.*@A.HADOOP.COM)s/@A.HADOOP.COM///L
+        RULE:[2:$1@$0](.*@A.HADOOP.COM)s/@A.HADOOP.COM///L
+        RULE:[1:$1@$0](.*@B.HADOOP.COM)s/@B.HADOOP.COM///L
+        RULE:[2:$1@$0](.*@B.HADOOP.COM)s/@B.HADOOP.COM///L
+    </value>
+</property>
+```
+
+
+Test the mapping:
+```
+hadoop org.apache.hadoop.security.HadoopKerberosName hdfs/localhost@A.HADOOP.COM
+```
+
+### Update hdfs-site.xml
+Add the following properties on the client side:
+```
+<!-- Control allowed realms to authenticate with -->
+<property>
+    <name>dfs.namenode.kerberos.principal.pattern</name>
+    <value>*</value>
+</property>
+```
+
+### Validate
+Verify that the trust is set up by running HDFS commands from A.HADOOP.COM against B.HADOOP.COM; run the following command on a node of the A.HADOOP.COM cluster:
+```
+hdfs dfs -ls hdfs://<NameNode_FQDN_for_B.HADOOP.COM_Cluster>:8020/
+```
+
+### DistCp between secure clusters
+
+Run the distcp command:
+```
+hadoop distcp hdfs://<Cluster_A_URI> hdfs://<Cluster_B_URI>
+```
+
+### DistCp between secure and insecure clusters
+
+Add the following properties in core-site.xml:
+```
+<property> 
+  <name>ipc.client.fallback-to-simple-auth-allowed</name>
+  <value>true</value>  
+</property>
+```
+
+Or run the distcp command with security setting:
+```
+hadoop distcp -D ipc.client.fallback-to-simple-auth-allowed=true hdfs://<Cluster_A_URI> hdfs://<Cluster_B_URI>
+```

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/346c2fab/has-project/supports/hadoop/hadoop-2.7.2.patch
----------------------------------------------------------------------
diff --git a/has-project/supports/hadoop/hadoop-2.7.2.patch b/has-project/supports/hadoop/hadoop-2.7.2.patch
new file mode 100644
index 0000000..85c7c3f
--- /dev/null
+++ b/has-project/supports/hadoop/hadoop-2.7.2.patch
@@ -0,0 +1,152 @@
+diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/hadoop-auth/pom.xml
+index aa3c2c7..e4f1fd2 100644
+--- a/hadoop-common-project/hadoop-auth/pom.xml
++++ b/hadoop-common-project/hadoop-auth/pom.xml
+@@ -143,6 +143,11 @@
+       <artifactId>curator-test</artifactId>
+       <scope>test</scope>
+     </dependency>
++    <dependency>
++      <groupId>org.apache.kerby</groupId>
++      <artifactId>has-client</artifactId>
++     <version>1.0.0-SNAPSHOT</version>
++    </dependency>
+   </dependencies>
+ 
+   <build>
+diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
+index f7f5f63..80b7aca 100644
+--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
++++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
+@@ -44,7 +44,8 @@
+   public static String getKrb5LoginModuleName() {
+     return System.getProperty("java.vendor").contains("IBM")
+       ? "com.ibm.security.auth.module.Krb5LoginModule"
+-      : "com.sun.security.auth.module.Krb5LoginModule";
++//      : "com.sun.security.auth.module.Krb5LoginModule";
++      :"org.apache.kerby.has.client.HasLoginModule";
+   }
+   
+   public static Oid getOidInstance(String oidName) 
+diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+index 65e4166..f5224bb 100644
+--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
++++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+@@ -89,6 +89,8 @@
+   private static boolean shouldRenewImmediatelyForTests = false;
+   static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
+   static final String HADOOP_PROXY_USER = "HADOOP_PROXY_USER";
++  public static final String HADOOP_SECURITY_AUTHENTICATION_USE_HAS
++    = "hadoop.security.authentication.use.has";
+ 
+   /**
+    * For the purposes of unit tests, we want to test login
+@@ -460,6 +462,9 @@ public String toString() {
+       "hadoop-user-kerberos";
+     private static final String KEYTAB_KERBEROS_CONFIG_NAME = 
+       "hadoop-keytab-kerberos";
++    private static final String HAS_KERBEROS_CONFIG_NAME =
++      "hadoop-has-kerberos";
++
+ 
+     private static final Map<String, String> BASIC_JAAS_OPTIONS =
+       new HashMap<String,String>();
+@@ -516,6 +521,29 @@ public String toString() {
+       KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
+       KEYTAB_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);      
+     }
++
++    private static final Map<String, String> HAS_KERBEROS_OPTIONS =
++        new HashMap<String, String>();
++
++    static {
++      if (IBM_JAVA) {
++        HAS_KERBEROS_OPTIONS.put("useDefaultCcache", "true");
++      } else {
++        HAS_KERBEROS_OPTIONS.put("doNotPrompt", "true");
++        HAS_KERBEROS_OPTIONS.put("useTgtTicket", "true");
++        HAS_KERBEROS_OPTIONS.put("hadoopSecurityHas", conf.get("hadoop.security.has"));
++      }
++      HAS_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
++    }
++
++    private static final AppConfigurationEntry HAS_KERBEROS_LOGIN =
++      new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
++                                LoginModuleControlFlag.OPTIONAL,
++                                HAS_KERBEROS_OPTIONS);
++    private static final AppConfigurationEntry[] HAS_KERBEROS_CONF =
++      new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, HAS_KERBEROS_LOGIN,
++                                  HADOOP_LOGIN};
++
+     private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN =
+       new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
+                                 LoginModuleControlFlag.REQUIRED,
+@@ -546,6 +574,8 @@ public String toString() {
+         }
+         KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
+         return KEYTAB_KERBEROS_CONF;
++      } else if(HAS_KERBEROS_CONFIG_NAME.equals(appName)) {
++        return HAS_KERBEROS_CONF;
+       }
+       return null;
+     }
+@@ -792,9 +822,16 @@ static void loginUserFromSubject(Subject subject) throws IOException {
+       if (subject == null) {
+         subject = new Subject();
+       }
+-      LoginContext login =
+-          newLoginContext(authenticationMethod.getLoginAppName(), 
+-                          subject, new HadoopConfiguration());
++      LoginContext login = null;
++      if (authenticationMethod.equals(AuthenticationMethod.KERBEROS)
++        && conf.getBoolean(HADOOP_SECURITY_AUTHENTICATION_USE_HAS, false)) {
++        login = newLoginContext(HadoopConfiguration.HAS_KERBEROS_CONFIG_NAME,
++          subject, new HadoopConfiguration());
++      } else {
++        login = newLoginContext(authenticationMethod.getLoginAppName(),
++          subject, new HadoopConfiguration());
++      }
++
+       login.login();
+       UserGroupInformation realUser = new UserGroupInformation(subject);
+       realUser.setLogin(login);
+@@ -925,6 +962,39 @@ public void run() {
+       }
+     }
+   }
++
++  /**
++   * Log a user in from a tgt ticket.
++   * @throws IOException
++   */
++  @InterfaceAudience.Public
++  @InterfaceStability.Evolving
++  public synchronized
++  static void loginUserFromHas() throws IOException {
++    if (!isSecurityEnabled())
++      return;
++
++    Subject subject = new Subject();
++    LoginContext login;
++    long start = 0;
++    try {
++      login = newLoginContext(HadoopConfiguration.HAS_KERBEROS_CONFIG_NAME,
++            subject, new HadoopConfiguration());
++      start = Time.now();
++      login.login();
++      metrics.loginSuccess.add(Time.now() - start);
++      loginUser = new UserGroupInformation(subject);
++      loginUser.setLogin(login);
++      loginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
++    } catch (LoginException le) {
++      if (start > 0) {
++        metrics.loginFailure.add(Time.now() - start);
++      }
++      throw new IOException("Login failure for " + le, le);
++    }
++    LOG.info("Login successful for user " + loginUser.getUserName());
++  }
++
+   /**
+    * Log a user in from a keytab file. Loads a user identity from a keytab
+    * file and logs them in. They become the currently logged-in user.

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/346c2fab/has-project/supports/hbase/README.md
----------------------------------------------------------------------
diff --git a/has-project/supports/hbase/README.md b/has-project/supports/hbase/README.md
new file mode 100644
index 0000000..ce3a2bf
--- /dev/null
+++ b/has-project/supports/hbase/README.md
@@ -0,0 +1,156 @@
+Enable HBase
+===============
+
+Check the Hadoop version that HBase depends on: HBase 1.0.0 through 1.3.1 depend on Hadoop 2.5.1.
+
+## 1. Apply the patch to hadoop-2.5.1 source code
+```
+git apply hbase-hadoop-2.5.1.patch
+```
+
+## 2. Build
+```
+mvn clean package -DskipTests
+```
+
+## 3. Copy the hadoop-auth jar and hadoop-common jar to hbase lib
+```
+cp hadoop/hadoop-common-project/hadoop-auth/target/hadoop-auth-2.5.1.jar $HBASE_HOME/lib/
+cp hadoop/hadoop-common-project/hadoop-common/target/hadoop-common-2.5.1.jar $HBASE_HOME/lib/
+```
+
+## 4. Update HBase security configuration
+
+### Update conf/hbase-site.xml
+```
+<property>
+  <name>hbase.security.authentication</name>
+  <value>kerberos</value> 
+</property>
+
+<property>
+  <name>hbase.rpc.engine</name>
+  <value>org.apache.hadoop.hbase.ipc.SecureRpcEngine</value>
+</property>
+
+<property> 
+  <name>hbase.regionserver.kerberos.principal</name> 
+  <value>hbase/_HOST@HADOOP.COM</value> 
+</property> 
+
+<property> 
+  <name>hbase.regionserver.keytab.file</name> 
+  <value>/path/to/hbase.keytab</value> 
+</property>
+
+<property> 
+  <name>hbase.master.kerberos.principal</name> 
+  <value>hbase/_HOST@HADOOP.COM</value> 
+</property> 
+
+<property> 
+  <name>hbase.master.keytab.file</name> 
+  <value>/path/to/hbase.keytab</value> 
+</property>
+```
+
+### Update /etc/hbase/conf/zk-jaas.conf
+```
+Client {
+      com.sun.security.auth.module.Krb5LoginModule required
+      useKeyTab=true
+      keyTab="/path/to/hbase.keytab"
+      storeKey=true
+      useTicketCache=false
+      principal="hbase/_HOST@HADOOP.COM";
+};
+```
+
+> Note "_HOST" should be replaced with the specific hostname.
+
+### Update conf/hbase-env.sh
+```
+export HBASE_OPTS="$HBASE_OPTS -Djava.security.auth.login.config=/etc/hbase/conf/zk-jaas.conf"
+export HBASE_MANAGES_ZK=false
+```
+
+### Update conf/hbase-site.xml on each HBase server host
+```
+<configuration>
+  <property>
+    <name>hbase.zookeeper.quorum</name>
+    <value>$ZK_NODES</value>
+  </property>
+   
+  <property>
+    <name>hbase.cluster.distributed</name>
+    <value>true</value>
+  </property>
+</configuration>
+```
+
+## 5. Update Hadoop configuration to support JSVC instead of SASL
+
+### Install jsvc on each host of the Hadoop cluster
+```
+sudo apt-get install jsvc
+```
+
+> Download commons-daemon-xxx.jar from http://archive.apache.org/dist/commons/daemon/binaries/
+
+```
+export CLASSPATH=$CLASSPATH:/path/to/commons-daemon-xxx.jar
+```
+
+### Update hadoop/etc/hadoop/hadoop-env.sh
+```
+export HADOOP_SECURE_DN_USER=root
+export HADOOP_SECURE_DN_PID_DIR=$HADOOP_HOME/$DN_USER/pids
+export HADOOP_SECURE_DN_LOG_DIR=$HADOOP_HOME/$DN_USER/logs
+
+export JSVC_HOME=/usr/bin
+```
+
+### Disable HTTPS in hadoop/etc/hadoop/hdfs-site.xml
+
+***REMOVE*** the following configuration:
+```
+<!-- HTTPS config -->
+<property>
+  <name>dfs.http.policy</name>
+  <value>HTTPS_ONLY</value>
+</property>
+<property>
+  <name>dfs.data.transfer.protection</name>
+  <value>integrity</value>
+</property>
+```
+
+### Update hadoop/etc/hadoop/hdfs-site.xml
+```
+<property>
+    <name>dfs.datanode.address</name>
+    <value>0.0.0.0:1004</value> 
+</property>
+<property>
+    <name>dfs.datanode.http.address</name>
+    <value>0.0.0.0:1006</value>
+</property>
+```
+
+> The DataNode ports must be privileged ports (below 1024).
+
+## 6. Start HBase
+
+### Restart the NameNode and DataNodes under jsvc
+```
+sbin/stop-dfs.sh                      # stop HDFS first
+
+sbin/hadoop-daemon.sh start namenode  # start the NameNode
+sbin/start-secure-dns.sh              # start the secure DataNodes
+```
+
+### Start HBase
+```
+bin/start-hbase.sh
+```

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/346c2fab/has-project/supports/hbase/hbase-hadoop-2.5.1.patch
----------------------------------------------------------------------
diff --git a/has-project/supports/hbase/hbase-hadoop-2.5.1.patch b/has-project/supports/hbase/hbase-hadoop-2.5.1.patch
new file mode 100644
index 0000000..f00cec5
--- /dev/null
+++ b/has-project/supports/hbase/hbase-hadoop-2.5.1.patch
@@ -0,0 +1,136 @@
+diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
+index ca0fce2..b43476d 100644
+--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
++++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
+@@ -44,7 +44,8 @@
+   public static String getKrb5LoginModuleName() {
+     return System.getProperty("java.vendor").contains("IBM")
+       ? "com.ibm.security.auth.module.Krb5LoginModule"
+-      : "com.sun.security.auth.module.Krb5LoginModule";
++//      : "com.sun.security.auth.module.Krb5LoginModule";
++      :"org.apache.kerby.has.client.HasLoginModule";
+   }
+   
+   public static Oid getOidInstance(String oidName) 
+diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+index 4f117fd..7a8fc43 100644
+--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
++++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+@@ -88,8 +88,10 @@
+   private static final float TICKET_RENEW_WINDOW = 0.80f;
+   static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
+   static final String HADOOP_PROXY_USER = "HADOOP_PROXY_USER";
+-  
+-  /** 
++  public static final String HADOOP_SECURITY_AUTHENTICATION_USE_HAS
++    = "hadoop.security.authentication.use.has";
++
++  /**
+    * UgiMetrics maintains UGI activity statistics
+    * and publishes them through the metrics interfaces.
+    */
+@@ -434,6 +436,8 @@ public String toString() {
+       "hadoop-user-kerberos";
+     private static final String KEYTAB_KERBEROS_CONFIG_NAME = 
+       "hadoop-keytab-kerberos";
++     private static final String HAS_KERBEROS_CONFIG_NAME =
++      "hadoop-has-kerberos";
+ 
+     private static final Map<String, String> BASIC_JAAS_OPTIONS =
+       new HashMap<String,String>();
+@@ -490,6 +494,29 @@ public String toString() {
+       KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
+       KEYTAB_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);      
+     }
++
++    private static final Map<String, String> HAS_KERBEROS_OPTIONS =
++        new HashMap<String, String>();
++
++    static {
++      if (IBM_JAVA) {
++        HAS_KERBEROS_OPTIONS.put("useDefaultCcache", "true");
++      } else {
++        HAS_KERBEROS_OPTIONS.put("doNotPrompt", "true");
++        HAS_KERBEROS_OPTIONS.put("useTgtTicket", "true");
++        HAS_KERBEROS_OPTIONS.put("hadoopSecurityHas", conf.get("hadoop.security.has"));
++      }
++      HAS_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
++    }
++
++    private static final AppConfigurationEntry HAS_KERBEROS_LOGIN =
++      new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
++                                LoginModuleControlFlag.OPTIONAL,
++                                HAS_KERBEROS_OPTIONS);
++    private static final AppConfigurationEntry[] HAS_KERBEROS_CONF =
++      new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, HAS_KERBEROS_LOGIN,
++                                  HADOOP_LOGIN};
++
+     private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN =
+       new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
+                                 LoginModuleControlFlag.REQUIRED,
+@@ -520,11 +547,45 @@ public String toString() {
+         }
+         KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal);
+         return KEYTAB_KERBEROS_CONF;
++      } else if(HAS_KERBEROS_CONFIG_NAME.equals(appName)) {
++        return HAS_KERBEROS_CONF;
+       }
+       return null;
+     }
+   }
+ 
++  /**
++   * Log a user in from a tgt ticket.
++   * @throws IOException
++   */
++  @InterfaceAudience.Public
++  @InterfaceStability.Evolving
++  public synchronized
++  static void loginUserFromHas() throws IOException {
++    if (!isSecurityEnabled())
++      return;
++
++    Subject subject = new Subject();
++    LoginContext login;
++    long start = 0;
++    try {
++      login = newLoginContext(HadoopConfiguration.HAS_KERBEROS_CONFIG_NAME,
++            subject, new HadoopConfiguration());
++      start = Time.now();
++      login.login();
++      metrics.loginSuccess.add(Time.now() - start);
++      loginUser = new UserGroupInformation(subject);
++      loginUser.setLogin(login);
++      loginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
++    } catch (LoginException le) {
++      if (start > 0) {
++        metrics.loginFailure.add(Time.now() - start);
++      }
++      throw new IOException("Login failure for " + le, le);
++    }
++    LOG.info("Login successful for user " + loginUser.getUserName());
++  }
++
+   private static String prependFileAuthority(String keytabPath) {
+     return keytabPath.startsWith("file://") ? keytabPath
+         : "file://" + keytabPath;
+@@ -751,9 +812,16 @@ static void loginUserFromSubject(Subject subject) throws IOException {
+       if (subject == null) {
+         subject = new Subject();
+       }
+-      LoginContext login =
+-          newLoginContext(authenticationMethod.getLoginAppName(), 
+-                          subject, new HadoopConfiguration());
++      LoginContext login = null;
++      if (authenticationMethod.equals(AuthenticationMethod.KERBEROS)
++        && conf.getBoolean(HADOOP_SECURITY_AUTHENTICATION_USE_HAS, false)) {
++        login = newLoginContext(HadoopConfiguration.HAS_KERBEROS_CONFIG_NAME,
++          subject, new HadoopConfiguration());
++      } else {
++        login = newLoginContext(authenticationMethod.getLoginAppName(),
++          subject, new HadoopConfiguration());
++      }
++
+       login.login();
+       UserGroupInformation realUser = new UserGroupInformation(subject);
+       realUser.setLogin(login);

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/346c2fab/has-project/supports/hive/README.md
----------------------------------------------------------------------
diff --git a/has-project/supports/hive/README.md b/has-project/supports/hive/README.md
new file mode 100644
index 0000000..d00e629
--- /dev/null
+++ b/has-project/supports/hive/README.md
@@ -0,0 +1,55 @@
+Enable Hive
+==============
+
+## Hive on hdfs
+
+### 1. Enable Kerberos authentication for HiveServer2
+> Update hive-site.xml
+```
+<property>
+  <name>hive.server2.authentication</name>
+  <value>KERBEROS</value>
+</property>
+<property>
+  <name>hive.server2.authentication.kerberos.principal</name>
+  <value>hive/_HOST@HADOOP.COM</value>
+</property>
+<property>
+  <name>hive.server2.authentication.kerberos.keytab</name>
+  <value>/path/to/hive.keytab</value>
+</property>
+```
+
+### 2. Enable impersonation in HiveServer2
+> Update hive-site.xml
+```
+<property>
+  <name>hive.server2.enable.impersonation</name>
+  <description>Enable user impersonation for HiveServer2</description>
+  <value>true</value>
+</property>
+```
+
+> Update core-site.xml of hadoop
+```
+<property>
+  <name>hadoop.proxyuser.hive.hosts</name>
+  <value>*</value>
+</property>
+<property>
+  <name>hadoop.proxyuser.hive.groups</name>
+  <value>*</value>
+</property>
+```
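+
+On a running cluster, the proxyuser settings can be picked up without a full restart using the standard refresh commands:
+```
+hdfs dfsadmin -refreshSuperUserGroupsConfiguration
+yarn rmadmin -refreshSuperUserGroupsConfiguration
+```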
+
+### 3. Start Hive
+> Start the services
+```
+hive --service metastore &
+hive --service hiveserver2 &
+```
+
+> Start the Hive shell
+```
+hive
+```
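+
+To verify the Kerberos setup end to end, one can also connect through Beeline; the host and port are placeholders, and the principal must match the HiveServer2 configuration above:
+```
+kinit someuser@HADOOP.COM
+beeline -u "jdbc:hive2://hiveserver2-host:10000/default;principal=hive/_HOST@HADOOP.COM"
+```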

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/346c2fab/has-project/supports/zookeeper/README.md
----------------------------------------------------------------------
diff --git a/has-project/supports/zookeeper/README.md b/has-project/supports/zookeeper/README.md
new file mode 100644
index 0000000..edc7a0e
--- /dev/null
+++ b/has-project/supports/zookeeper/README.md
@@ -0,0 +1,59 @@
+Enable ZooKeeper
+===================
+
+## 1. Create the dependency jars
+```
+cd HAS/supports/zookeeper
+mvn clean package
+```
+
+## 2. Copy the jars to ZooKeeper lib directory
+```
+cp HAS/supports/zookeeper/lib/* $ZOOKEEPER_HOME/lib/
+```
+
+## 3. Copy the conf file to ZooKeeper conf directory
+```
+cp HAS/supports/zookeeper/conf/* $ZOOKEEPER_HOME/conf/
+```
+
+## 4. Update ZooKeeper security configuration files
+> Update $ZOO_CONF_DIR/jaas.conf
+> Replace "_HOST" with the specific hostname for each host
+```
+Server {
+  com.sun.security.auth.module.Krb5LoginModule required
+  useKeyTab=true
+  keyTab="/path/to/zookeeper.keytab"
+  storeKey=true
+  useTicketCache=true
+  principal="zookeeper/_HOST@HADOOP.COM";
+};
+
+Client {
+  com.sun.security.auth.module.Krb5LoginModule required
+  useKeyTab=true
+  keyTab="/home/hdfs/keytab/hbase.keytab"
+  storeKey=true
+  useTicketCache=false
+  principal="zookeeper/_HOST@HADOOP.COM";
+};
+```
+
+> Update conf/zoo.cfg
+```
+authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider
+jaasLoginRenew=3600000
+kerberos.removeHostFromPrincipal=true
+kerberos.removeRealmFromPrincipal=true
+```
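+
+> Restart ZooKeeper so the changes take effect, for example:
+```
+$ZOOKEEPER_HOME/bin/zkServer.sh restart
+```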
+
+## 5. Verify the configuration
+```
+zkCli.sh -server hostname:port
+create /znode1 data sasl:zookeeper:cdwra
+getAcl /znode1
+```
+
+> The output of getAcl should show that the proper scheme and permissions were applied to the znode, e.g. 'sasl,'zookeeper

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/346c2fab/has-project/supports/zookeeper/conf/jaas.conf
----------------------------------------------------------------------
diff --git a/has-project/supports/zookeeper/conf/jaas.conf b/has-project/supports/zookeeper/conf/jaas.conf
new file mode 100644
index 0000000..570009f
--- /dev/null
+++ b/has-project/supports/zookeeper/conf/jaas.conf
@@ -0,0 +1,13 @@
+ Server {
+      com.sun.security.auth.module.Krb5LoginModule required
+      useKeyTab=true
+      keyTab="/etc/zookeeper/zookeeper.keytab"
+      storeKey=true
+      useTicketCache=true
+      principal="zookeeper/localhost@HADOOP.COM";
+  };
+
+Client {
+  org.apache.kerby.has.client.HasLoginModule required
+  useTgtTicket=true;
+};

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/346c2fab/has-project/supports/zookeeper/conf/java.env
----------------------------------------------------------------------
diff --git a/has-project/supports/zookeeper/conf/java.env b/has-project/supports/zookeeper/conf/java.env
new file mode 100644
index 0000000..bb7098b
--- /dev/null
+++ b/has-project/supports/zookeeper/conf/java.env
@@ -0,0 +1 @@
+export JVMFLAGS="-Djava.security.auth.login.config=$ZOOKEEPER_HOME/conf/jaas.conf"

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/346c2fab/has-project/supports/zookeeper/pom.xml
----------------------------------------------------------------------
diff --git a/has-project/supports/zookeeper/pom.xml b/has-project/supports/zookeeper/pom.xml
new file mode 100644
index 0000000..a0508ab
--- /dev/null
+++ b/has-project/supports/zookeeper/pom.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.kerby</groupId>
+    <artifactId>has-project</artifactId>
+    <version>2.0.0-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>zookeeper-dist</artifactId>
+  <description>ZooKeeper dist</description>
+  <name>ZooKeeper dist</name>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>has-client</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>lib</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+</project>