Posted to commits@directory.apache.org by pl...@apache.org on 2017/11/15 05:12:08 UTC

[03/10] directory-kerby git commit: Add the HAS project to Kerby.

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/TestUtil.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/TestUtil.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/TestUtil.java
new file mode 100644
index 0000000..1ae1a64
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/TestUtil.java
@@ -0,0 +1,368 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server;
+
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.server.web.WebConfigKey;
+import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.bouncycastle.x509.X509V1CertificateGenerator;
+
+import javax.security.auth.x500.X500Principal;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.math.BigInteger;
+import java.net.URL;
+import java.security.GeneralSecurityException;
+import java.security.InvalidKeyException;
+import java.security.Key;
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.KeyStore;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
+import java.security.SecureRandom;
+import java.security.SignatureException;
+import java.security.cert.Certificate;
+import java.security.cert.CertificateEncodingException;
+import java.security.cert.X509Certificate;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+class TestUtil {
+
+  /**
+   * system property for test data: {@value}
+   */
+  private static final String SYSPROP_TEST_DATA_DIR = "test.build.data";
+
+  /**
+   * The default path to use in Hadoop path references: {@value}
+   */
+  private static final String DEFAULT_TEST_DATA_PATH = "target/";
+
+  /**
+   * Get a temp path. This may or may not be relative; it depends on what the
+   * {@link #SYSPROP_TEST_DATA_DIR} is set to. If unset, it returns a path
+   * under the relative path {@link #DEFAULT_TEST_DATA_PATH}
+   *
+   * @param subPath sub path, with no leading "/" character
+   * @return a string to use in paths
+   */
+  public static String getTempPath(String subPath) {
+    String prop = System.getProperty(SYSPROP_TEST_DATA_DIR, DEFAULT_TEST_DATA_PATH);
+    if (prop.isEmpty()) {
+      // corner case: property is there but empty
+      prop = DEFAULT_TEST_DATA_PATH;
+    }
+    if (!prop.endsWith("/")) {
+      prop = prop + "/";
+    }
+    return prop + subPath;
+  }
+
+  public static String getClasspathDir(Class testClass) throws Exception {
+    String file = testClass.getName();
+    file = file.replace('.', '/') + ".class";
+    URL url = Thread.currentThread().getContextClassLoader().getResource(file);
+    String baseDir = url.toURI().getPath();
+    baseDir = baseDir.substring(0, baseDir.length() - file.length() - 1);
+    return baseDir;
+  }
+
+  /**
+   * Create a self-signed X.509 Certificate.
+   *
+   * @param dn the X.509 Distinguished Name, eg "CN=Test, L=London, C=GB"
+   * @param pair the KeyPair
+   * @param days how many days from now the Certificate is valid for
+   * @param algorithm the signing algorithm, eg "SHA1withRSA"
+   * @return the self-signed certificate
+   */
+  @SuppressWarnings("deprecation")
+  private static X509Certificate generateCertificate(String dn, KeyPair pair, int days, String algorithm)
+      throws CertificateEncodingException, InvalidKeyException, IllegalStateException,
+      NoSuchProviderException, NoSuchAlgorithmException, SignatureException {
+
+    Date from = new Date();
+    Date to = new Date(from.getTime() + days * 86400000L);
+    BigInteger sn = new BigInteger(64, new SecureRandom());
+    X509V1CertificateGenerator certGen = new X509V1CertificateGenerator();
+    X500Principal dnName = new X500Principal(dn);
+
+    certGen.setSerialNumber(sn);
+    certGen.setIssuerDN(dnName);
+    certGen.setNotBefore(from);
+    certGen.setNotAfter(to);
+    certGen.setSubjectDN(dnName);
+    certGen.setPublicKey(pair.getPublic());
+    certGen.setSignatureAlgorithm(algorithm);
+
+    return certGen.generate(pair.getPrivate());
+  }
+
+  private static KeyPair generateKeyPair(String algorithm) throws NoSuchAlgorithmException {
+    KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm);
+    keyGen.initialize(1024);
+    return keyGen.genKeyPair();
+  }
+
+  private static KeyStore createEmptyKeyStore() throws GeneralSecurityException, IOException {
+    KeyStore ks = KeyStore.getInstance("JKS");
+    ks.load(null, null); // initialize
+    return ks;
+  }
+
+  private static void saveKeyStore(KeyStore ks, String filename, String password)
+      throws GeneralSecurityException, IOException {
+    // try-with-resources ensures the stream is closed even if store() throws
+    try (FileOutputStream out = new FileOutputStream(filename)) {
+      ks.store(out, password.toCharArray());
+    }
+  }
+
+  private static void createKeyStore(String filename, String password, String alias, Key privateKey, Certificate cert)
+      throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    ks.setKeyEntry(alias, privateKey, password.toCharArray(), new Certificate[]{cert});
+    saveKeyStore(ks, filename, password);
+  }
+
+  private static <T extends Certificate> void createTrustStore(String filename, String password, Map<String, T> certs)
+      throws GeneralSecurityException, IOException {
+    KeyStore ks = createEmptyKeyStore();
+    for (Map.Entry<String, T> cert : certs.entrySet()) {
+      ks.setCertificateEntry(cert.getKey(), cert.getValue());
+    }
+    saveKeyStore(ks, filename, password);
+  }
+
+  /**
+   * Performs complete setup of SSL configuration in preparation for testing an
+   * SSLFactory.  This includes keys, certs, keystore, truststore, the server
+   * SSL configuration file, the client SSL configuration file, and the master
+   * configuration file read by the SSLFactory.
+   *
+   * @param keystoreDir   String directory to save keystore
+   * @param sslConfDir    String directory to save SSL configuration files
+   * @param conf          HasConfig master configuration to be used by an SSLFactory,
+   *                      which will be mutated by this method
+   * @param useClientCert boolean true to make the client present a cert in the SSL handshake
+   */
+  public static void setupSSLConfig(String keystoreDir, String sslConfDir, HasConfig conf, boolean useClientCert)
+      throws Exception {
+    setupSSLConfig(keystoreDir, sslConfDir, conf, useClientCert, true, "");
+  }
+
+  /**
+   * Performs complete setup of SSL configuration in preparation for testing an
+   * SSLFactory.  This includes keys, certs, keystore, truststore, the server
+   * SSL configuration file, the client SSL configuration file, and the master
+   * configuration file read by the SSLFactory.
+   *
+   * @param keystoreDir   String directory to save keystore
+   * @param sslConfDir    String directory to save SSL configuration files
+   * @param conf          HasConfig master configuration to be used by an SSLFactory,
+   *                      which will be mutated by this method
+   * @param useClientCert boolean true to make the client present a cert in the SSL handshake
+   * @param trustStore    boolean true to create truststore, false not to create it
+   * @param excludeCiphers String comma separated ciphers to exclude
+   * @throws Exception e
+   */
+  private static void setupSSLConfig(String keystoreDir, String sslConfDir, HasConfig conf, boolean useClientCert,
+                                     boolean trustStore, String excludeCiphers) throws Exception {
+    String clientKS = keystoreDir + "/clientKS.jks";
+    String clientPassword = "clientP";
+    String serverKS = keystoreDir + "/serverKS.jks";
+    String serverPassword = "serverP";
+    String trustKS = null;
+    String trustPassword = "trustP";
+
+    File sslClientConfFile = new File(sslConfDir, getClientSSLConfigFileName());
+    File sslServerConfFile = new File(sslConfDir, getServerSSLConfigFileName());
+
+    Map<String, X509Certificate> certs = new HashMap<String, X509Certificate>();
+
+    if (useClientCert) {
+      KeyPair cKP = TestUtil.generateKeyPair("RSA");
+      X509Certificate cCert = TestUtil.generateCertificate("CN=localhost, O=client", cKP, 30, "SHA1withRSA");
+      TestUtil.createKeyStore(clientKS, clientPassword, "client", cKP.getPrivate(), cCert);
+      certs.put("client", cCert);
+    }
+
+    KeyPair sKP = TestUtil.generateKeyPair("RSA");
+    X509Certificate sCert = TestUtil.generateCertificate("CN=localhost, O=server", sKP, 30, "SHA1withRSA");
+    TestUtil.createKeyStore(serverKS, serverPassword, "server", sKP.getPrivate(), sCert);
+    certs.put("server", sCert);
+
+    if (trustStore) {
+      trustKS = keystoreDir + "/trustKS.jks";
+      TestUtil.createTrustStore(trustKS, trustPassword, certs);
+    }
+
+    HasConfig clientSSLConf = createClientSSLConfig(clientKS, clientPassword, clientPassword, trustKS, excludeCiphers);
+    HasConfig serverSSLConf = createServerSSLConfig(serverKS, serverPassword, serverPassword, trustKS, excludeCiphers);
+
+    saveConfig(sslClientConfFile, clientSSLConf);
+    saveConfig(sslServerConfFile, serverSSLConf);
+
+    conf.setString(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
+    conf.setString(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConfFile.getCanonicalPath());
+    conf.setString(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile.getCanonicalPath());
+    conf.setString(WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, sslServerConfFile.getAbsolutePath());
+    conf.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, useClientCert);
+  }
+
+  /**
+   * Create SSL configuration for a client.
+   *
+   * @param clientKS       String client keystore file
+   * @param password       String store password, or null to avoid setting store password
+   * @param keyPassword    String key password, or null to avoid setting key password
+   * @param trustKS        String truststore file
+   * @param excludeCiphers String comma separated ciphers to exclude
+   * @return Configuration for client SSL
+   */
+  private static HasConfig createClientSSLConfig(String clientKS, String password, String keyPassword,
+                                                 String trustKS, String excludeCiphers) {
+    return createSSLConfig(SSLFactory.Mode.CLIENT, clientKS, password, keyPassword, trustKS, excludeCiphers);
+  }
+
+  /**
+   * Creates SSL configuration for a server.
+   *
+   * @param serverKS       String server keystore file
+   * @param password       String store password, or null to avoid setting store password
+   * @param keyPassword    String key password, or null to avoid setting key password
+   * @param trustKS        String truststore file
+   * @param excludeCiphers String comma separated ciphers to exclude
+   * @return HasConfig
+   * @throws IOException e
+   */
+  private static HasConfig createServerSSLConfig(String serverKS, String password, String keyPassword,
+                                                 String trustKS, String excludeCiphers) throws IOException {
+    return createSSLConfig(SSLFactory.Mode.SERVER, serverKS, password, keyPassword, trustKS, excludeCiphers);
+  }
+
+  /**
+   * Returns the client SSL configuration file name.  Under parallel test
+   * execution, this file name is parametrized by a unique ID to ensure that
+   * concurrent tests don't collide on an SSL configuration file.
+   *
+   * @return client SSL configuration file name
+   */
+  private static String getClientSSLConfigFileName() {
+    return getSSLConfigFileName("ssl-client");
+  }
+
+  /**
+   * Returns the server SSL configuration file name.  Under parallel test
+   * execution, this file name is parametrized by a unique ID to ensure that
+   * concurrent tests don't collide on an SSL configuration file.
+   *
+   * @return server SSL configuration file name
+   */
+  private static String getServerSSLConfigFileName() {
+    return getSSLConfigFileName("ssl-server");
+  }
+
+  /**
+   * Returns an SSL configuration file name.  Under parallel test
+   * execution, this file name is parametrized by a unique ID to ensure that
+   * concurrent tests don't collide on an SSL configuration file.
+   *
+   * @param base the base of the file name
+   * @return SSL configuration file name for base
+   */
+  private static String getSSLConfigFileName(String base) {
+    String testUniqueForkId = System.getProperty("test.unique.fork.id");
+    String fileSuffix = testUniqueForkId != null ? "-" + testUniqueForkId : "";
+    return base + fileSuffix + ".xml";
+  }
+
+  /**
+   * Creates SSL configuration.
+   *
+   * @param mode        SSLFactory.Mode mode to configure
+   * @param keystore    String keystore file
+   * @param password    String store password, or null to avoid setting store password
+   * @param keyPassword String key password, or null to avoid setting key password
+   * @param trustKS     String truststore file
+   * @param excludeCiphers String comma separated ciphers to exclude
+   * @return Configuration for SSL
+   */
+  private static HasConfig createSSLConfig(SSLFactory.Mode mode, String keystore, String password,
+                                           String keyPassword, String trustKS, String excludeCiphers) {
+    String trustPassword = "trustP";
+
+    HasConfig sslConf = new HasConfig();
+    if (keystore != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_KEYSTORE_LOCATION_TPL_KEY), keystore);
+    }
+    if (password != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_KEYSTORE_PASSWORD_TPL_KEY), password);
+    }
+    if (keyPassword != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_KEYSTORE_KEYPASSWORD_TPL_KEY),
+          keyPassword);
+    }
+    if (trustKS != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_TRUSTSTORE_LOCATION_TPL_KEY), trustKS);
+    }
+    if (trustPassword != null) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_TRUSTSTORE_PASSWORD_TPL_KEY),
+          trustPassword);
+    }
+    if (null != excludeCiphers && !excludeCiphers.isEmpty()) {
+      sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+          FileBasedKeyStoresFactory.SSL_EXCLUDE_CIPHER_LIST),
+          excludeCiphers);
+    }
+    sslConf.setString(FileBasedKeyStoresFactory.resolvePropertyName(mode,
+        FileBasedKeyStoresFactory.SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY), "1000");
+
+    return sslConf;
+  }
+
+  /**
+   * Saves configuration to a file.
+   *
+   * @param file File to save
+   * @param conf Configuration contents to write to file
+   * @throws IOException if there is an I/O error saving the file
+   */
+  private static void saveConfig(File file, HasConfig conf) throws IOException {
+    Properties prop = new Properties();
+
+    // copy the configuration entries into a Properties object
+    for (String name : conf.getNames()) {
+      prop.setProperty(name, conf.getString(name));
+    }
+
+    // write the properties to the given file, closing the stream when done
+    try (OutputStream output = new FileOutputStream(file)) {
+      prop.store(output, null);
+    }
+  }
+}
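
A minimal sketch of how a test class might drive this utility, assuming it sits in the same package so it can reach the package-private TestUtil; the class name and directory names below are illustrative only, not part of the commit:

  package org.apache.hadoop.has.server;

  import java.io.File;

  import org.apache.hadoop.has.common.HasConfig;
  import org.junit.BeforeClass;

  public class TestSslSetupExample {
    private static final HasConfig conf = new HasConfig();

    @BeforeClass
    public static void setUpSsl() throws Exception {
      // getTempPath() resolves under "test.build.data", falling back to target/
      String keystoreDir = TestUtil.getTempPath("ssl-keystores");
      new File(keystoreDir).mkdirs();
      // write the SSL config files next to the test classes so SSLFactory can load them
      String sslConfDir = TestUtil.getClasspathDir(TestSslSetupExample.class);
      // generates keys, self-signed certs, keystore/truststore and the
      // ssl-client/ssl-server files, and points conf at them (no client cert here)
      TestUtil.setupSSLConfig(keystoreDir, sslConfDir, conf, false);
    }
  }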

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonConfApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonConfApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonConfApi.java
new file mode 100644
index 0000000..1f7b443
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonConfApi.java
@@ -0,0 +1,83 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.json;
+
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.hadoop.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import java.io.File;
+import java.io.IOException;
+import javax.ws.rs.core.MultivaluedMap;
+
+import static org.junit.Assert.assertEquals;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestJsonConfApi extends TestRestApiBase {
+
+    @Test
+    public void testSetPlugin() {
+        WebResource webResource = getWebResource("conf/setplugin");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("plugin", "RAM");
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("HAS plugin set successfully.\n", response);
+    }
+
+    @Test
+    public void testConfigKdcBackend() {
+        WebResource webResource = getWebResource("conf/configkdcbackend");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("backendType", "json");
+        String backend = null;
+        try {
+            backend = new File(testDir, "json-backend").getCanonicalPath();
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        params.add("dir", backend);
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("Json backend set successfully.\n", response);
+    }
+
+    @Test
+    public void testConfigXJsonKdc() {
+        WebResource webResource = getWebResource("conf/configkdc");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("realm", "HADOOP.COM");
+        params.add("host", "localhost");
+        params.add("port", "8866");
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("HAS server KDC set successfully.\n", response);
+    }
+
+    @Test
+    public void testGetKrb5Conf() {
+        getKrb5Conf();
+    }
+
+    @Test
+    public void testGetHasConf() {
+        getHasConf();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHadminApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHadminApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHadminApi.java
new file mode 100644
index 0000000..412a8a1
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHadminApi.java
@@ -0,0 +1,80 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.json;
+
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.hadoop.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import javax.ws.rs.core.MultivaluedMap;
+
+import static org.junit.Assert.assertEquals;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestJsonHadminApi extends TestRestApiBase {
+
+    @Test
+    public void testCreatePrincipals() {
+        createPrincipals();
+    }
+
+    @Test
+    public void testExportKeytabs() {
+        exportKeytabs();
+    }
+
+    @Test
+    public void testExportKeytab() {
+        exportKeytab();
+    }
+
+    @Test
+    public void testAddPrincipal() {
+        addPrincipal();
+    }
+
+    @Test
+    public void testGetPrincipals() {
+        getPrincipals();
+    }
+
+    @Test
+    public void testRenamePrincipal() {
+        renamePrincipal();
+    }
+
+    @Test
+    public void testXDeletePrincipal() {
+        deletePrincipal();
+    }
+
+    @Test
+    public void testSetConf() {
+        WebResource webResource = getWebResource("admin/setconf");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("isEnable", "true");
+        ClientResponse response = webResource.queryParams(params).put(ClientResponse.class);
+        assertEquals(200, response.getStatus());
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHasApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHasApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHasApi.java
new file mode 100644
index 0000000..bd72448
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/json/TestJsonHasApi.java
@@ -0,0 +1,54 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.json;
+
+import com.sun.jersey.api.client.WebResource;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import java.io.File;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestJsonHasApi extends TestRestApiBase {
+
+    @Test
+    public void hostRoles() {
+        WebResource webResource = getWebResource("hostroles");
+        String response = webResource.get(String.class);
+        System.out.println(response);
+    }
+
+    @Test
+    public void testKdcStart() {
+        kdcStart();
+        File backendDir = new File(testDir, "json-backend");
+        if (backendDir.exists()) {
+            FileUtil.fullyDelete(backendDir);
+        }
+    }
+
+    @Test
+    public void testKdcInit() {
+        kdcInit();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLConfApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLConfApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLConfApi.java
new file mode 100644
index 0000000..6dc240d
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLConfApi.java
@@ -0,0 +1,70 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.mysql;
+
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.hadoop.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import javax.ws.rs.core.MultivaluedMap;
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestMySQLConfApi extends TestRestApiBase {
+
+    @Test
+    public void testConfigKdcBackend() throws IOException {
+        WebResource webResource = getWebResource("conf/configkdcbackend");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("backendType", "mysql");
+        params.add("driver", "org.h2.Driver");
+        params.add("url", "jdbc:h2:" + testDir.getCanonicalPath() + "/mysql-backend/mysqlbackend;MODE=MySQL");
+        params.add("user", "root");
+        params.add("password", "123456");
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("MySQL backend set successfully.\n", response);
+    }
+
+    @Test
+    public void testConfigMySQLKdc() {
+        WebResource webResource = getWebResource("conf/configkdc");
+        MultivaluedMap<String, String> params = new MultivaluedMapImpl();
+        params.add("realm", "HADOOP.COM");
+        params.add("host", "localhost");
+        params.add("port", "8899");
+        String response = webResource.queryParams(params).put(String.class);
+        assertEquals("HAS server KDC set successfully.\n", response);
+    }
+
+    @Test
+    public void testGetKrb5Conf() {
+        getKrb5Conf();
+    }
+
+    @Test
+    public void testGetHasConf() {
+        getHasConf();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHadminApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHadminApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHadminApi.java
new file mode 100644
index 0000000..8adb625
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHadminApi.java
@@ -0,0 +1,64 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.mysql;
+
+import org.apache.hadoop.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestMySQLHadminApi extends TestRestApiBase {
+
+    @Test
+    public void testCreatePrincipals() {
+        createPrincipals();
+    }
+
+    @Test
+    public void testExportKeytabs() {
+        exportKeytabs();
+    }
+
+    @Test
+    public void testExportKeytab() {
+        exportKeytab();
+    }
+
+    @Test
+    public void testAddPrincipal() {
+        addPrincipal();
+    }
+
+    @Test
+    public void testGetPrincipals() {
+        getPrincipals();
+    }
+
+    @Test
+    public void testRenamePrincipal() {
+        renamePrincipal();
+    }
+
+    @Test
+    public void testXDeletePrincipal() {
+        deletePrincipal();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHasApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHasApi.java b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHasApi.java
new file mode 100644
index 0000000..f2b6a4a
--- /dev/null
+++ b/has/has-server/src/test/java/org/apache/hadoop/has/server/mysql/TestMySQLHasApi.java
@@ -0,0 +1,46 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.mysql;
+
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.has.server.TestRestApiBase;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+
+import java.io.File;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestMySQLHasApi extends TestRestApiBase {
+
+    @Test
+    public void testKdcStart() {
+        kdcStart();
+        File backendDir = new File(testDir, "mysql-backend");
+        if (backendDir.exists()) {
+            FileUtil.fullyDelete(backendDir);
+        }
+    }
+
+    @Test
+    public void testKdcInit() {
+        kdcInit();
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/resources/conf/backend.conf
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/resources/conf/backend.conf b/has/has-server/src/test/resources/conf/backend.conf
new file mode 100644
index 0000000..2085217
--- /dev/null
+++ b/has/has-server/src/test/resources/conf/backend.conf
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+kdc_identity_backend = org.apache.kerby.kerberos.kdc.identitybackend.JsonIdentityBackend
+backend.json.dir = /tmp/test/has/jsonbackend

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/resources/conf/has-server.conf
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/resources/conf/has-server.conf b/has/has-server/src/test/resources/conf/has-server.conf
new file mode 100644
index 0000000..dcea4ad
--- /dev/null
+++ b/has/has-server/src/test/resources/conf/has-server.conf
@@ -0,0 +1,25 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[HAS]
+  https_host = localhost
+  https_port = 8092
+  enable_conf = true
+
+[PLUGIN]
+  auth_type = RAM

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/resources/conf/kdc.conf
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/resources/conf/kdc.conf b/has/has-server/src/test/resources/conf/kdc.conf
new file mode 100644
index 0000000..a74e180
--- /dev/null
+++ b/has/has-server/src/test/resources/conf/kdc.conf
@@ -0,0 +1,23 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[kdcdefaults]
+  kdc_host = localhost
+  kdc_udp_port = 8866
+  kdc_tcp_port = 8866
+  kdc_realm = HADOOP.COM

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/resources/conf/krb5.conf
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/resources/conf/krb5.conf b/has/has-server/src/test/resources/conf/krb5.conf
new file mode 100644
index 0000000..0f5c367
--- /dev/null
+++ b/has/has-server/src/test/resources/conf/krb5.conf
@@ -0,0 +1,29 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[libdefaults]
+    kdc_realm = HADOOP.COM
+    default_realm = HADOOP.COM
+    udp_preference_limit = 4096
+    kdc_tcp_port = 8866
+    kdc_udp_port = 8866
+
+[realms]
+    HADOOP.COM = {
+        kdc = localhost:8866
+    }

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/resources/webapps/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/resources/webapps/WEB-INF/web.xml b/has/has-server/src/test/resources/webapps/WEB-INF/web.xml
new file mode 100644
index 0000000..b13cb1f
--- /dev/null
+++ b/has/has-server/src/test/resources/webapps/WEB-INF/web.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License. See accompanying LICENSE file.
+-->
+<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
+
+</web-app>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/test/resources/webapps/has/index.html
----------------------------------------------------------------------
diff --git a/has/has-server/src/test/resources/webapps/has/index.html b/has/has-server/src/test/resources/webapps/has/index.html
new file mode 100644
index 0000000..6f80950
--- /dev/null
+++ b/has/has-server/src/test/resources/webapps/has/index.html
@@ -0,0 +1,24 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<meta http-equiv="REFRESH" charset="UTF-8" />
+<title>HAS Administration</title>
+</head>
+</html>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/pom.xml b/has/has-tool/has-client-tool/pom.xml
new file mode 100644
index 0000000..7ded9e1
--- /dev/null
+++ b/has/has-tool/has-client-tool/pom.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>has-tool</artifactId>
+        <groupId>org.apache.hadoop</groupId>
+        <version>1.0.0-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>has-client-tool</artifactId>
+
+    <dependencies>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>has-client</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.kerby</groupId>
+        <artifactId>kdc-tool</artifactId>
+        <version>${kerby.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>has-plugins</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+    </dependencies>
+
+
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/HadminRemoteTool.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/HadminRemoteTool.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/HadminRemoteTool.java
new file mode 100644
index 0000000..8756420
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/HadminRemoteTool.java
@@ -0,0 +1,164 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HadminRemoteCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteAddPrincipalCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteCreatePrincipalsCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteDeletePrincipalCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteDisableConfCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteEnableConfCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteExportKeytabsCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteGetHostRolesCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteGetPrincipalsCmd;
+import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteRenamePrincipalCmd;
+import org.apache.kerby.util.OSUtil;
+
+import java.io.File;
+import java.util.Scanner;
+
+public class HadminRemoteTool {
+
+    private static final String PROMPT = HadminRemoteTool.class.getSimpleName() + ".remote";
+    private static final String USAGE = (OSUtil.isWindows()
+        ? "Usage: bin\\hadmin-remote.cmd" : "Usage: sh bin/hadmin-remote.sh")
+        + " <conf-file>\n"
+        + "\tExample:\n"
+        + "\t\t"
+        + (OSUtil.isWindows()
+        ? "bin\\hadmin-remote.cmd" : "sh bin/hadmin-remote.sh")
+        + " conf\n";
+
+    private static final String LEGAL_COMMANDS = "Available commands are: "
+        + "\n"
+        + "add_principal, addprinc\n"
+        + "                         Add principal\n"
+        + "delete_principal, delprinc\n"
+        + "                         Delete principal\n"
+        + "rename_principal, renprinc\n"
+        + "                         Rename principal\n"
+        + "list_principals, listprincs\n"
+        + "                         List principals\n"
+        + "get_hostroles, hostroles\n"
+        + "                         Get hostRoles\n"
+        + "export_keytabs, expkeytabs\n"
+        + "                         Export keytabs\n"
+        + "create_principals, creprincs\n"
+        + "                         Create principals\n"
+        + "enable_configure, enable\n"
+        + "                         Enable configure\n"
+        + "disable_configure, disable\n"
+        + "                         Disable configure\n";
+
+    public static void main(String[] args) {
+        HasAdminClient hadmin;
+        HasAuthAdminClient authHasAdminClient = null;
+
+        if (args.length < 1) {
+            System.err.println(USAGE);
+            System.exit(1);
+        }
+
+        String confDirPath = args[0];
+        File confFile = new File(confDirPath, "hadmin.conf");
+        HasConfig hasConfig;
+        try {
+            hasConfig = HasUtil.getHasConfig(confFile);
+        } catch (HasException e) {
+            System.err.println(e.getMessage());
+            return;
+        }
+
+        hadmin = new HasAdminClient(hasConfig);
+
+        if (hasConfig.getFilterAuthType().equals("kerberos")) {
+            authHasAdminClient = new HasAuthAdminClient(hasConfig);
+        }
+
+        System.out.println("enter \"cmd\" to see legal commands.");
+        System.out.print(PROMPT + ": ");
+
+        try (Scanner scanner = new Scanner(System.in, "UTF-8")) {
+            String input = scanner.nextLine();
+
+            while (!(input.equals("quit") || input.equals("exit") || input.equals("q"))) {
+                try {
+                    execute(hadmin, authHasAdminClient, input);
+                } catch (HasException e) {
+                    System.err.println(e.getMessage());
+                }
+                System.out.print(PROMPT + ": ");
+                input = scanner.nextLine();
+            }
+        }
+    }
+
+    private static void execute(HasAdminClient hadmin, HasAuthAdminClient hasAuthAdminClient,
+                               String input) throws HasException {
+        input = input.trim();
+        if (input.startsWith("cmd")) {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+        HadminRemoteCmd executor;
+
+        String[] items = input.split("\\s+");
+        String cmd = items[0];
+
+        if (cmd.equals("add_principal")
+            || cmd.equals("addprinc")) {
+            executor = new HasRemoteAddPrincipalCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("delete_principal")
+            || cmd.equals("delprinc")) {
+            executor = new HasRemoteDeletePrincipalCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("rename_principal")
+            || cmd.equals("renprinc")) {
+            executor = new HasRemoteRenamePrincipalCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("list_principals")
+            || cmd.equals("listprincs")) {
+            executor = new HasRemoteGetPrincipalsCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("get_hostroles")
+            || cmd.equals("hostroles")) {
+            executor = new HasRemoteGetHostRolesCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("create_principals")
+            || cmd.equals("creprincs")) {
+            executor = new HasRemoteCreatePrincipalsCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("export_keytabs")
+            || cmd.equals("expkeytabs")) {
+            executor = new HasRemoteExportKeytabsCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("enable_configure")
+            || cmd.equals("enable")) {
+            executor = new HasRemoteEnableConfCmd(hadmin, hasAuthAdminClient);
+        } else if (cmd.equals("disable_configure")
+            || cmd.equals("disable")) {
+            executor = new HasRemoteDisableConfCmd(hadmin, hasAuthAdminClient);
+        } else {
+            System.out.println(LEGAL_COMMANDS);
+            return;
+        }
+        executor.execute(items);
+    }
+
+}
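
For context, an interactive hadmin-remote session against an assumed conf directory might look like the following; server responses are omitted, and the principal and password come straight from the add_principal usage example:

  $ sh bin/hadmin-remote.sh conf
  enter "cmd" to see legal commands.
  HadminRemoteTool.remote: add_principal -pw mypassword alice
  HadminRemoteTool.remote: exit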

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java
new file mode 100644
index 0000000..81f6d98
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HadminRemoteCmd.java
@@ -0,0 +1,49 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+public abstract class HadminRemoteCmd {
+
+    private HasAdminClient hadmin;
+    private HasAuthAdminClient authHadmin;
+
+    public HadminRemoteCmd(HasAdminClient hadmin, HasAuthAdminClient authHadminClient) {
+        this.hadmin = hadmin;
+        this.authHadmin = authHadminClient;
+    }
+
+    protected HasAdminClient getHadmin() {
+        return hadmin;
+    }
+
+    protected HasAuthAdminClient getAuthHadmin() {
+        return authHadmin;
+    }
+
+    /**
+     * Execute the hadmin cmd.
+     * @param input Input cmd to execute
+     */
+    public abstract void execute(String[] input) throws HasException;
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java
new file mode 100644
index 0000000..39a24d0
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteAddPrincipalCmd.java
@@ -0,0 +1,70 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+/**
+ * Remote add principal cmd
+ */
+public class HasRemoteAddPrincipalCmd extends HadminRemoteCmd {
+
+    public static final String USAGE = "Usage: add_principal [options] <principal-name>\n"
+        + "\toptions are:\n"
+        + "\t\t[-randkey]\n"
+        + "\t\t[-pw password]"
+        + "\tExample:\n"
+        + "\t\tadd_principal -pw mypassword alice\n";
+
+    public HasRemoteAddPrincipalCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        String clientPrincipal = items[items.length - 1];
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        if (!items[1].startsWith("-")) {
+            hasAdminClient.addPrincipal(clientPrincipal);
+        } else if (items[1].startsWith("-randkey")) {
+            hasAdminClient.addPrincipal(clientPrincipal);
+        } else if (items[1].startsWith("-pw")) {
+            String password = items[2];
+            hasAdminClient.addPrincipal(clientPrincipal, password);
+        } else {
+            System.err.println("add_principal cmd format error.");
+            System.err.println(USAGE);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java
new file mode 100644
index 0000000..aa79e23
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteCreatePrincipalsCmd.java
@@ -0,0 +1,82 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+
+public class HasRemoteCreatePrincipalsCmd extends HadminRemoteCmd {
+    private static final String USAGE = "\nUsage: create_principals [hostRoles-file]\n"
+            + "\t'hostRoles-file' is a file with a hostRoles json string like:\n"
+            + "\t\t{HOSTS: [ {\"name\":\"host1\",\"hostRoles\":\"HDFS\"}, "
+            + "{\"name\":\"host2\",\"hostRoles\":\"HDFS,HBASE\"} ] }\n"
+            + "\tExample:\n"
+            + "\t\tcreate_principals hostroles.txt\n";
+
+    public HasRemoteCreatePrincipalsCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        //String param = items[0];
+        if (items.length != 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        File hostRoles = new File(items[1]);
+        if (!hostRoles.exists()) {
+            System.err.println("HostRoles file is not exists.");
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        StringBuilder sb = new StringBuilder();
+        try (BufferedReader reader = new BufferedReader(new FileReader(hostRoles))) {
+            String tempString;
+            while ((tempString = reader.readLine()) != null) {
+                sb.append(tempString);
+            }
+        } catch (FileNotFoundException e) {
+            throw new HasException("The hostRoles file does not exist.", e);
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when reading the hostRoles file.", e);
+        }
+        hasAdminClient.requestCreatePrincipals(sb.toString());
+    }
+}

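The hostRoles file that create_principals reads is a small JSON document in the format shown in USAGE above. A minimal sketch, assuming the admin clients are available; the file name and host/role values are illustrative only.

    import org.apache.hadoop.has.client.HasAdminClient;
    import org.apache.hadoop.has.client.HasAuthAdminClient;
    import org.apache.hadoop.has.common.HasException;
    import org.apache.hadoop.has.tool.client.hadmin.remote.cmd.HasRemoteCreatePrincipalsCmd;

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class CreatePrincipalsExample {
        static void createForCluster(HasAdminClient hadmin, HasAuthAdminClient authHadmin)
                throws IOException, HasException {
            // Host names and roles are illustrative; the structure follows the USAGE string.
            String hostRoles = "{HOSTS: ["
                + "{\"name\":\"host1\",\"hostRoles\":\"HDFS\"},"
                + "{\"name\":\"host2\",\"hostRoles\":\"HDFS,HBASE\"}]}";
            Path file = Paths.get("hostroles.txt");
            Files.write(file, hostRoles.getBytes(StandardCharsets.UTF_8));
            new HasRemoteCreatePrincipalsCmd(hadmin, authHadmin)
                .execute(new String[]{"create_principals", file.toString()});
        }
    }
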
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java
new file mode 100644
index 0000000..260ff2c
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDeletePrincipalCmd.java
@@ -0,0 +1,89 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+import java.io.Console;
+import java.util.Scanner;
+
+/**
+ * Remote delete principal cmd
+ */
+public class HasRemoteDeletePrincipalCmd extends HadminRemoteCmd {
+
+    public static final String USAGE = "Usage: delete_principal <principal-name>\n"
+        + "\tExample:\n"
+        + "\t\tdelete_principal alice\n";
+
+    public HasRemoteDeletePrincipalCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        String principal = items[items.length - 1];
+        String reply;
+        Console console = System.console();
+        String prompt = "Are you sure to delete the principal? (yes/no, YES/NO, y/n, Y/N) ";
+        if (console == null) {
+            System.out.println("Couldn't get Console instance, "
+                + "maybe you're running this from within an IDE. "
+                + "Use scanner to read password.");
+            Scanner scanner = new Scanner(System.in, "UTF-8");
+            reply = getReply(scanner, prompt);
+        } else {
+            reply = getReply(console, prompt);
+        }
+        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
+            hasAdminClient.deletePrincipal(principal);
+        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
+            System.out.println("Principal \"" + principal + "\"  not deleted.");
+        } else {
+            System.err.println("Unknown request, fail to delete the principal.");
+            System.err.println(USAGE);
+        }
+    }
+
+    private String getReply(Scanner scanner, String prompt) {
+        System.out.println(prompt);
+        return scanner.nextLine().trim();
+    }
+
+    private String getReply(Console console, String prompt) {
+        console.printf(prompt);
+        String line = console.readLine();
+        return line == null ? "" : line.trim();
+    }
+}

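delete_principal asks for confirmation on the console, falling back to standard input when no console is attached. When scripting, it may be simpler to skip the prompt and call the admin client directly; a minimal sketch, assuming a client instance is already at hand.

    import org.apache.hadoop.has.client.HasAdminClient;
    import org.apache.hadoop.has.common.HasException;

    public class DeletePrincipalExample {
        // Deletes a principal without the interactive yes/no confirmation.
        static void deleteQuietly(HasAdminClient hasAdminClient, String principal) throws HasException {
            hasAdminClient.deletePrincipal(principal);
        }
    }
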
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java
new file mode 100644
index 0000000..30027b3
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteDisableConfCmd.java
@@ -0,0 +1,49 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+/**
+ * Remote disable configuration cmd
+ */
+public class HasRemoteDisableConfCmd extends HadminRemoteCmd {
+
+    public static final String USAGE = "Usage: disable_configure\n"
+            + "\tExample:\n"
+            + "\t\tdisable\n";
+
+    public HasRemoteDisableConfCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+        hasAdminClient.setEnableOfConf("false");
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java
new file mode 100644
index 0000000..852d487
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteEnableConfCmd.java
@@ -0,0 +1,49 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+/**
+ * Remote enable configuration cmd
+ */
+public class HasRemoteEnableConfCmd extends HadminRemoteCmd {
+
+    public static final String USAGE = "Usage: enable_configure\n"
+            + "\tExample:\n"
+            + "\t\tenable\n";
+
+    public HasRemoteEnableConfCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+        hasAdminClient.setEnableOfConf("true");
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java
new file mode 100644
index 0000000..ead3b28
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteExportKeytabsCmd.java
@@ -0,0 +1,58 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+public class HasRemoteExportKeytabsCmd extends HadminRemoteCmd {
+    private static final String USAGE = "\nUsage: export_keytabs <host> [role]\n"
+            + "\tExample:\n"
+            + "\t\texport_keytabs host1 HDFS\n";
+
+    public HasRemoteExportKeytabsCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        //TODO add save path option
+        //String param = items[0];
+        if (items.length < 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        String host = items[1];
+        String role = "";
+        if (items.length >= 3) {
+            role = items[2];
+        }
+        hasAdminClient.getKeytabByHostAndRole(host, role);
+    }
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java
new file mode 100644
index 0000000..70b9ea7
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetHostRolesCmd.java
@@ -0,0 +1,68 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+
+public class HasRemoteGetHostRolesCmd extends HadminRemoteCmd {
+    private static final String USAGE = "Usage: get_hostroles\n"
+            + "\tExample:\n"
+            + "\t\tget_hostroles\n";
+
+    public HasRemoteGetHostRolesCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] input) throws HasException {
+        HasAdminClient hasAdminClient = getHadmin();
+        String result = hasAdminClient.getHostRoles();
+
+        if (result != null) {
+            try {
+                JSONArray hostRoles = new JSONArray(result);
+                for (int i = 0; i < hostRoles.length(); i++) {
+                    JSONObject hostRole = hostRoles.getJSONObject(i);
+                    System.out.print("\tHostRole: " + hostRole.getString("HostRole")
+                            + ", PrincipalNames: ");
+                    JSONArray principalNames = hostRole.getJSONArray("PrincipalNames");
+                    for (int j = 0; j < principalNames.length(); j++) {
+                        System.out.print(principalNames.getString(j));
+                        if (j == principalNames.length() - 1) {
+                            System.out.println();
+                        } else {
+                            System.out.print(", ");
+                        }
+                    }
+                }
+            } catch (JSONException e) {
+                throw new HasException("Errors occurred when getting the host roles.", e);
+            }
+        } else {
+            throw new HasException("Could not get hostRoles.");
+        }
+    }
+
+}

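get_hostroles parses a JSON array of objects, each carrying a "HostRole" string and a "PrincipalNames" array. A minimal sketch of the payload shape it expects, built with the same jettison classes used above; the role and principal values are illustrative.

    import org.codehaus.jettison.json.JSONArray;
    import org.codehaus.jettison.json.JSONException;
    import org.codehaus.jettison.json.JSONObject;

    public class HostRolesPayloadExample {
        // Builds one HostRole entry of the kind the command iterates over and prints.
        static String samplePayload() throws JSONException {
            JSONObject hdfs = new JSONObject();
            hdfs.put("HostRole", "HDFS");
            hdfs.put("PrincipalNames",
                new JSONArray().put("nn/host1@EXAMPLE.COM").put("dn/host1@EXAMPLE.COM"));
            return new JSONArray().put(hdfs).toString();
        }
    }
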
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java
new file mode 100644
index 0000000..05d6970
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteGetPrincipalsCmd.java
@@ -0,0 +1,76 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+import java.util.List;
+
+public class HasRemoteGetPrincipalsCmd extends HadminRemoteCmd {
+    private static final String USAGE = "Usage: list_principals [expression]\n"
+            + "\t'expression' is a shell-style glob expression that can contain the wild-card characters ?, *, and []."
+            + "\tExample:\n"
+            + "\t\tlist_principals [expression]\n";
+
+    public HasRemoteGetPrincipalsCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length > 2) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        List<String> principalLists;
+
+        if (items.length == 1) {
+            try {
+                principalLists = hasAdminClient.getPrincipals();
+            } catch (Exception e) {
+                System.err.println("Errors occurred when getting the principals. " + e.getMessage());
+                return;
+            }
+        } else {
+            // A glob expression was supplied.
+            String exp = items[1];
+            principalLists = hasAdminClient.getPrincipals(exp);
+        }
+
+        if (principalLists.isEmpty() || (principalLists.size() == 1 && principalLists.get(0).isEmpty())) {
+            return;
+        } else {
+            System.out.println("Principals are listed:");
+            for (int i = 0; i < principalLists.size(); i++) {
+                System.out.println(principalLists.get(i));
+            }
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java
----------------------------------------------------------------------
diff --git a/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java
new file mode 100644
index 0000000..f900f3a
--- /dev/null
+++ b/has/has-tool/has-client-tool/src/main/java/org/apache/hadoop/has/tool/client/hadmin/remote/cmd/HasRemoteRenamePrincipalCmd.java
@@ -0,0 +1,91 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.tool.client.hadmin.remote.cmd;
+
+import org.apache.hadoop.has.client.HasAdminClient;
+import org.apache.hadoop.has.client.HasAuthAdminClient;
+import org.apache.hadoop.has.common.HasException;
+
+import java.io.Console;
+import java.util.Scanner;
+
+/**
+ * Remote rename principal cmd
+ */
+public class HasRemoteRenamePrincipalCmd extends HadminRemoteCmd {
+    public static final String USAGE = "Usage: rename_principal <old_principal_name>"
+        + " <new_principal_name>\n"
+        + "\tExample:\n"
+        + "\t\trename_principal alice bob\n";
+
+    public HasRemoteRenamePrincipalCmd(HasAdminClient hadmin, HasAuthAdminClient authHadmin) {
+        super(hadmin, authHadmin);
+    }
+
+    @Override
+    public void execute(String[] items) throws HasException {
+        if (items.length < 3) {
+            System.err.println(USAGE);
+            return;
+        }
+
+        HasAdminClient hasAdminClient;
+        if (getAuthHadmin() != null) {
+            hasAdminClient = getAuthHadmin();
+        } else {
+            hasAdminClient = getHadmin();
+        }
+
+        String oldPrincipalName = items[items.length - 2];
+        String newPrincipalName = items[items.length - 1];
+
+        String reply;
+        Console console = System.console();
+        String prompt = "Are you sure to rename the principal? (yes/no, YES/NO, y/n, Y/N) ";
+        if (console == null) {
+            System.out.println("Couldn't get Console instance, "
+                + "maybe you're running this from within an IDE. "
+                + "Use scanner to read password.");
+            Scanner scanner = new Scanner(System.in, "UTF-8");
+            reply = getReply(scanner, prompt);
+        } else {
+            reply = getReply(console, prompt);
+        }
+        if (reply.equals("yes") || reply.equals("YES") || reply.equals("y") || reply.equals("Y")) {
+            hasAdminClient.renamePrincipal(oldPrincipalName, newPrincipalName);
+        } else if (reply.equals("no") || reply.equals("NO") || reply.equals("n") || reply.equals("N")) {
+            System.out.println("Principal \"" + oldPrincipalName + "\"  not renamed.");
+        } else {
+            System.err.println("Unknown request, fail to rename the principal.");
+            System.err.println(USAGE);
+        }
+    }
+
+    private String getReply(Scanner scanner, String prompt) {
+        System.out.println(prompt);
+        return scanner.nextLine().trim();
+    }
+
+    private String getReply(Console console, String prompt) {
+        console.printf(prompt);
+        String line = console.readLine();
+        return line == null ? "" : line.trim();
+    }
+}