Posted to commits@directory.apache.org by pl...@apache.org on 2017/11/28 03:04:09 UTC

[08/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.
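
The groupId change named in the subject lands in the HAS module pom.xml files (in other parts of this patch series); the hunks shown in this part remove the HAS web server and REST sources that lived under the old org.apache.hadoop.has packages. A minimal sketch of the Maven coordinate change, assuming the old groupId (not shown in these hunks) matched the org.apache.hadoop prefix:

    <!-- pom.xml, before (old groupId assumed) -->
    <groupId>org.apache.hadoop</groupId>

    <!-- pom.xml, after, per the commit subject -->
    <groupId>org.apache.kerby</groupId>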

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java
deleted file mode 100644
index 82bb129..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-@InterfaceAudience.Public
-@InterfaceStability.Stable
-public enum HostRoleType {
-    HDFS("HDFS", new String[]{"HTTP", "hdfs"}),
-    YARN("YARN", new String[]{"yarn"}),
-    MAPRED("MAPRED", new String[]{"mapred"}),
-    HBASE("HBASE", new String[]{"hbase"}),
-    ZOOKEEPER("ZOOKEEPER", new String[]{"zookeeper"}),
-    SPARK("SPARK", new String[]{"spark"}),
-    HIVE("HIVE", new String[]{"hive"}),
-    OOZIE("OOZIE", new String[]{"oozie"}),
-    HUE("HUE", new String[]{"hue"});
-
-    private String name;
-    private String[] princs;
-
-    HostRoleType(String name, String[] princs) {
-        this.name = name;
-        this.princs = princs;
-    }
-
-    public String[] getPrincs() {
-        return princs;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public void setName(String name) {
-        this.name = name;
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java
deleted file mode 100644
index bd0a1ca..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.has.server.web;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-
-/** 
- * This class contains constants for configuration keys and default values
- * used in hdfs.
- */
-@InterfaceAudience.Private
-public class WebConfigKey {
-
-  public static final int HAS_HTTP_PORT_DEFAULT = 9870;
-  public static final String HAS_HTTP_HOST_DEFAULT = "0.0.0.0";
-  public static final String HAS_HTTP_ADDRESS_KEY = "has.http-address";
-  public static final String HAS_HTTP_ADDRESS_DEFAULT = HAS_HTTP_HOST_DEFAULT + ":" + HAS_HTTP_PORT_DEFAULT;
-
-  public static final String HAS_HTTPS_BIND_HOST_KEY = "has.https-bind-host";
-  public static final int HAS_HTTPS_PORT_DEFAULT = 9871;
-  public static final String HAS_HTTPS_HOST_DEFAULT = "0.0.0.0";
-  public static final String HAS_HTTPS_ADDRESS_KEY = "has.https-address";
-  public static final String HAS_HTTPS_ADDRESS_DEFAULT = HAS_HTTPS_HOST_DEFAULT + ":" + HAS_HTTPS_PORT_DEFAULT;
-  public static final String HAS_HTTP_POLICY_KEY = "has.http.policy";
-  public static final String HAS_HTTP_POLICY_DEFAULT = HttpConfig.Policy.HTTPS_ONLY.name();
-
-  public static final String HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY = "has.https.server.keystore.resource";
-  public static final String HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT = "ssl-server.xml";
-  public static final String HAS_SERVER_HTTPS_KEYPASSWORD_KEY = "ssl.server.keystore.keypassword";
-  public static final String HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY = "ssl.server.keystore.password";
-  public static final String HAS_SERVER_HTTPS_KEYSTORE_LOCATION_KEY = "ssl.server.keystore.location";
-  public static final String HAS_SERVER_HTTPS_TRUSTSTORE_LOCATION_KEY = "ssl.server.truststore.location";
-  public static final String HAS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY = "ssl.server.truststore.password";
-  public static final String HAS_CLIENT_HTTPS_NEED_AUTH_KEY = "has.client.https.need-auth";
-  public static final boolean HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT = false;
-
-  public static final String HAS_AUTHENTICATION_FILTER_KEY = "has.web.authentication.filter";
-  public static final String HAS_AUTHENTICATION_FILTER_DEFAULT = AuthenticationFilter.class.getName();
-
-  public static final String HAS_AUTHENTICATION_FILTER_AUTH_TYPE = "has.authentication.filter.auth.type";
-  public static final String HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY = "has.authentication.kerberos.principal";
-  public static final String HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY = "has.authentication.kerberos.keytab";
-  public static final String HAS_AUTHENTICATION_KERBEROS_NAME_RULES = "has.authentication.kerberos.name.rules";
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java
deleted file mode 100644
index 3e5f832..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java
+++ /dev/null
@@ -1,348 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.has.server.web;
-
-import org.apache.hadoop.HadoopIllegalArgumentException;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.server.HasServer;
-import org.apache.hadoop.has.server.web.rest.HasApi;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.http.HttpServer2;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.servlet.ServletContext;
-import java.io.File;
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.URI;
-import java.util.HashMap;
-import java.util.Map;
-
-public class WebServer {
-    public static final Logger LOG = LoggerFactory.getLogger(WebServer.class);
-
-    private HttpServer2 httpServer;
-    private final HasConfig conf;
-
-    private InetSocketAddress httpAddress;
-    private InetSocketAddress httpsAddress;
-
-    protected static final String HAS_SERVER_ATTRIBUTE_KEY = "hasserver";
-
-    public WebServer(HasConfig conf) {
-        this.conf = conf;
-    }
-
-    public HasConfig getConf() {
-        return conf;
-    }
-
-    private void init() {
-
-        final String pathSpec = "/has/v1/*";
-
-        // add has packages
-        httpServer.addJerseyResourcePackage(HasApi.class
-                .getPackage().getName(),
-            pathSpec);
-    }
-
-    public void defineFilter() {
-        String authType = conf.getString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE);
-        if (authType.equals("kerberos")) {
-            // add authentication filter for webhdfs
-            final String className = conf.getString(
-                WebConfigKey.HAS_AUTHENTICATION_FILTER_KEY,
-                WebConfigKey.HAS_AUTHENTICATION_FILTER_DEFAULT);
-
-            final String name = className;
-
-            Map<String, String> params = getAuthFilterParams(conf);
-
-            String adminPathSpec = "/has/v1/admin/*";
-            HttpServer2.defineFilter(httpServer.getWebAppContext(), name, className,
-                params, new String[]{adminPathSpec});
-            HttpServer2.LOG.info("Added filter '" + name + "' (class=" + className
-                + ")");
-        }
-    }
-
-    public void defineConfFilter() {
-        String confFilterName = ConfFilter.class.getName();
-        String confPath = "/has/v1/conf/*";
-        HttpServer2.defineFilter(httpServer.getWebAppContext(), confFilterName, confFilterName,
-                getAuthFilterParams(conf), new String[]{confPath});
-        HttpServer2.LOG.info("Added filter '" + confFilterName + "' (class=" + confFilterName
-                + ")");
-    }
-
-    private Map<String, String> getAuthFilterParams(HasConfig conf) {
-        Map<String, String> params = new HashMap<String, String>();
-
-        String authType = conf.getString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE);
-        if (authType != null && !authType.isEmpty()) {
-            params.put(AuthenticationFilter.AUTH_TYPE, authType);
-        }
-        String principal = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
-        if (principal != null && !principal.isEmpty()) {
-            try {
-                principal = SecurityUtil.getServerPrincipal(principal,
-                    getHttpsAddress().getHostName());
-            } catch (IOException e) {
-                LOG.warn("Errors occurred when get server principal. " + e.getMessage());
-            }
-            params.put(KerberosAuthenticationHandler.PRINCIPAL, principal);
-        }
-        String keytab = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
-        if (keytab != null && !keytab.isEmpty()) {
-            params.put(KerberosAuthenticationHandler.KEYTAB, keytab);
-        }
-        String rule = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_NAME_RULES);
-        if (rule != null && !rule.isEmpty()) {
-            params.put(KerberosAuthenticationHandler.NAME_RULES, rule);
-        } else {
-            params.put(KerberosAuthenticationHandler.NAME_RULES, "DEFAULT");
-        }
-        return params;
-    }
-
-    public InetSocketAddress getBindAddress() {
-        if (httpAddress != null) {
-            return httpAddress;
-        } else if (httpsAddress != null) {
-            return httpsAddress;
-        } else {
-            return null;
-        }
-    }
-
-    /**
-     * for information related to the different configuration options and
-     * Http Policy is decided.
-     */
-    public void start() throws HasException {
-
-        HttpConfig.Policy policy = getHttpPolicy(conf);
-
-        final String bindHost =
-            conf.getString(WebConfigKey.HAS_HTTPS_BIND_HOST_KEY);
-        InetSocketAddress httpAddr = null;
-        if (policy.isHttpEnabled()) {
-            final String httpAddrString = conf.getString(
-                WebConfigKey.HAS_HTTP_ADDRESS_KEY,
-                WebConfigKey.HAS_HTTP_ADDRESS_DEFAULT);
-            httpAddr = NetUtils.createSocketAddr(httpAddrString);
-            if (bindHost != null && !bindHost.isEmpty()) {
-                httpAddr = new InetSocketAddress(bindHost, httpAddr.getPort());
-            }
-            LOG.info("Get the http address: " + httpAddr);
-        }
-
-        InetSocketAddress httpsAddr = null;
-        if (policy.isHttpsEnabled()) {
-            final String httpsAddrString = conf.getString(
-                WebConfigKey.HAS_HTTPS_ADDRESS_KEY,
-                WebConfigKey.HAS_HTTPS_ADDRESS_DEFAULT);
-            httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
-
-            if (bindHost != null && !bindHost.isEmpty()) {
-                httpsAddr = new InetSocketAddress(bindHost, httpsAddr.getPort());
-            }
-            LOG.info("Get the https address: " + httpsAddr);
-        }
-
-        HttpServer2.Builder builder = httpServerTemplateForHAS(conf, httpAddr, httpsAddr, "has");
-
-        try {
-            httpServer = builder.build();
-        } catch (IOException e) {
-            throw new HasException("Errors occurred when building http server. " + e.getMessage());
-        }
-
-        init();
-
-        try {
-            httpServer.start();
-        } catch (IOException e) {
-            throw new HasException("Errors occurred when starting http server. " + e.getMessage());
-        }
-        int connIdx = 0;
-        if (policy.isHttpEnabled()) {
-            httpAddress = httpServer.getConnectorAddress(connIdx++);
-            conf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY,
-                NetUtils.getHostPortString(httpAddress));
-        }
-
-        if (policy.isHttpsEnabled()) {
-            httpsAddress = httpServer.getConnectorAddress(connIdx);
-            conf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY,
-                NetUtils.getHostPortString(httpsAddress));
-        }
-    }
-
-    public void setWebServerAttribute(HasServer hasServer) {
-        httpServer.setAttribute(HAS_SERVER_ATTRIBUTE_KEY, hasServer);
-    }
-
-    public static HasServer getHasServerFromContext(ServletContext context) {
-        return (HasServer) context.getAttribute(HAS_SERVER_ATTRIBUTE_KEY);
-    }
-
-    /**
-     * Get http policy.
-     */
-    public HttpConfig.Policy getHttpPolicy(HasConfig conf) {
-        String policyStr = conf.getString(WebConfigKey.HAS_HTTP_POLICY_KEY,
-            WebConfigKey.HAS_HTTP_POLICY_DEFAULT);
-        HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
-        if (policy == null) {
-            throw new HadoopIllegalArgumentException("Unregonized value '"
-                + policyStr + "' for " + WebConfigKey.HAS_HTTP_POLICY_KEY);
-        }
-
-        conf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY, policy.name());
-        return policy;
-    }
-
-    /**
-     * Return a HttpServer.Builder that the ssm can use to
-     * initialize their HTTP / HTTPS server.
-     */
-    public HttpServer2.Builder httpServerTemplateForHAS(
-        HasConfig conf, final InetSocketAddress httpAddr, final InetSocketAddress httpsAddr,
-        String name) throws HasException {
-        HttpConfig.Policy policy = getHttpPolicy(conf);
-
-        HttpServer2.Builder builder = new HttpServer2.Builder().setName(name);
-
-        if (policy.isHttpEnabled()) {
-            if (httpAddr.getPort() == 0) {
-                builder.setFindPort(true);
-            }
-
-            URI uri = URI.create("http://" + NetUtils.getHostPortString(httpAddr));
-            builder.addEndpoint(uri);
-            LOG.info("Starting Web-server for " + name + " at: " + uri);
-        }
-
-        if (policy.isHttpsEnabled() && httpsAddr != null) {
-            HasConfig sslConf = loadSslConfiguration(conf);
-            loadSslConfToHttpServerBuilder(builder, sslConf);
-
-            if (httpsAddr.getPort() == 0) {
-                builder.setFindPort(true);
-            }
-
-            URI uri = URI.create("https://" + NetUtils.getHostPortString(httpsAddr));
-            builder.addEndpoint(uri);
-            LOG.info("Starting Web-server for " + name + " at: " + uri);
-        }
-
-        return builder;
-    }
-
-    /**
-     * Load HTTPS-related configuration.
-     */
-    public HasConfig loadSslConfiguration(HasConfig conf) throws HasException {
-        HasConfig sslConf = new HasConfig();
-
-        String sslConfigString = conf.getString(
-            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT);
-        LOG.info("Get the ssl config file: " + sslConfigString);
-        try {
-            sslConf.addIniConfig(new File(sslConfigString));
-        } catch (IOException e) {
-            throw new HasException("Errors occurred when adding config. " + e.getMessage());
-        }
-
-        final String[] reqSslProps = {
-            WebConfigKey.HAS_SERVER_HTTPS_TRUSTSTORE_LOCATION_KEY,
-            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_LOCATION_KEY,
-            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY,
-            WebConfigKey.HAS_SERVER_HTTPS_KEYPASSWORD_KEY
-        };
-
-        // Check if the required properties are included
-        for (String sslProp : reqSslProps) {
-            if (sslConf.getString(sslProp) == null) {
-                LOG.warn("SSL config " + sslProp + " is missing. If "
-                    + WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY
-                    + " is specified, make sure it is a relative path");
-            }
-        }
-
-        boolean requireClientAuth = conf.getBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY,
-            WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
-        sslConf.setBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY, requireClientAuth);
-        return sslConf;
-    }
-
-    public HttpServer2.Builder loadSslConfToHttpServerBuilder(HttpServer2.Builder builder,
-                                                              HasConfig sslConf) {
-        return builder
-            .needsClientAuth(
-                sslConf.getBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY,
-                    WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT))
-            .keyPassword(getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_KEYPASSWORD_KEY))
-            .keyStore(sslConf.getString("ssl.server.keystore.location"),
-                getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY),
-                sslConf.getString("ssl.server.keystore.type", "jks"))
-            .trustStore(sslConf.getString("ssl.server.truststore.location"),
-                getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY),
-                sslConf.getString("ssl.server.truststore.type", "jks"))
-            .excludeCiphers(
-                sslConf.getString("ssl.server.exclude.cipher.list"));
-    }
-
-    /**
-     * Leverages the Configuration.getPassword method to attempt to get
-     * passwords from the CredentialProvider API before falling back to
-     * clear text in config - if falling back is allowed.
-     *
-     * @param conf  Configuration instance
-     * @param alias name of the credential to retreive
-     * @return String credential value or null
-     */
-    public String getPassword(HasConfig conf, String alias) {
-
-        return conf.getString(alias);
-    }
-
-    public void stop() throws Exception {
-        if (httpServer != null) {
-            httpServer.stop();
-        }
-    }
-
-    public InetSocketAddress getHttpAddress() {
-        return httpAddress;
-    }
-
-    public InetSocketAddress getHttpsAddress() {
-        return httpsAddress;
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java
deleted file mode 100644
index a6fc4ce..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest;
-
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.HasServer;
-import org.apache.hadoop.has.server.web.WebServer;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
-import org.apache.kerby.kerberos.kerb.server.KdcUtil;
-
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.File;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * HAS configure web methods implementation.
- */
-@Path("/conf")
-public class ConfApi {
-
-    @Context
-    private ServletContext context;
-
-    @Context
-    private HttpServletRequest httpRequest;
-
-    /**
-     * Set HAS plugin.
-     *
-     * @param plugin HAS plugin name
-     * @return Response
-     */
-    @PUT
-    @Path("/setplugin")
-    @Consumes({MediaType.TEXT_PLAIN})
-    @Produces({MediaType.TEXT_PLAIN})
-    public Response setPlugin(@QueryParam("plugin") final String plugin) {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            WebServer.LOG.info("Set HAS plugin...");
-            try {
-                Map<String, String> values = new HashMap<>();
-                File hasConfFile = new File(hasServer.getConfDir(), "has-server.conf");
-                HasConfig hasConfig = HasUtil.getHasConfig(hasConfFile);
-                if (hasConfig != null) {
-                    String defaultValue = hasConfig.getPluginName();
-                    values.put(defaultValue, plugin);
-                } else {
-                    throw new RuntimeException("has-server.conf not found. ");
-                }
-                hasServer.updateConfFile("has-server.conf", values);
-            } catch (IOException | HasException e) {
-                throw new RuntimeException("Failed to set HAS plugin. ", e);
-            }
-            WebServer.LOG.info("HAS plugin set successfully.");
-
-            return Response.status(200).entity("HAS plugin set successfully.\n").build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Config HAS server backend.
-     *
-     * @param backendType type of backend
-     * @param dir         json dir
-     * @param driver      mysql JDBC connector driver
-     * @param url         mysql JDBC connector url
-     * @param user        mysql user name
-     * @param password    mysql password of user
-     * @return Response
-     */
-    @PUT
-    @Path("/configkdcbackend")
-    @Consumes({MediaType.APPLICATION_JSON})
-    @Produces({MediaType.TEXT_PLAIN})
-    public Response configKdcBackend(
-        @QueryParam("backendType") final String backendType,
-        @QueryParam("dir") @DefaultValue("/tmp/has/jsonbackend") final String dir,
-        @QueryParam("driver") @DefaultValue("com.mysql.jdbc.Driver") final String driver,
-        @QueryParam("url") @DefaultValue("jdbc:mysql://127.0.0.1:3306/mysqlbackend") final String url,
-        @QueryParam("user") @DefaultValue("root") final String user,
-        @QueryParam("password") @DefaultValue("passwd") final String password) {
-
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            if ("json".equals(backendType)) {
-                WebServer.LOG.info("Set Json backend...");
-                try {
-                    Map<String, String> values = new HashMap<>();
-                    values.put("_JAR_", "org.apache.kerby.kerberos.kdc.identitybackend.JsonIdentityBackend");
-                    values.put("#_JSON_DIR_", "backend.json.dir = " + dir);
-                    values.put("#_MYSQL_\n", "");
-                    hasServer.updateConfFile("backend.conf", values);
-                } catch (IOException | HasException e) {
-                    throw new RuntimeException("Failed to set Json backend. ", e);
-                }
-                WebServer.LOG.info("Json backend set successfully.");
-
-                return Response.status(200).entity("Json backend set successfully.\n").build();
-            } else if ("mysql".equals(backendType)) {
-                WebServer.LOG.info("Set MySQL backend...");
-                try {
-                    String mysqlConfig = "mysql_driver = " + driver + "\nmysql_url = " + url
-                        + "\nmysql_user = " + user + "\nmysql_password = " + password;
-                    Map<String, String> values = new HashMap<>();
-                    values.put("_JAR_", "org.apache.hadoop.has.server.kdc.MySQLIdentityBackend");
-                    values.put("#_JSON_DIR_\n", "");
-                    values.put("#_MYSQL_", mysqlConfig);
-                    hasServer.updateConfFile("backend.conf", values);
-                } catch (IOException | HasException e) {
-                    throw new RuntimeException("Failed to set MySQL backend. ", e);
-                }
-                WebServer.LOG.info("MySQL backend set successfully.");
-
-                return Response.status(200).entity("MySQL backend set successfully.\n").build();
-            } else {
-                return Response.status(400).entity(backendType + " is not supported.\n").build();
-            }
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Config HAS server KDC.
-     * @param port KDC port to set
-     * @param realm KDC realm to set
-     * @param host KDC host to set
-     * @return Response
-     */
-    @PUT
-    @Path("/configkdc")
-    @Consumes({MediaType.TEXT_PLAIN})
-    @Produces({MediaType.TEXT_PLAIN})
-    public Response configKdc(
-        @QueryParam("port") final int port,
-        @QueryParam("realm") final String realm,
-        @QueryParam("host") final String host) {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            WebServer.LOG.info("Config HAS server KDC...");
-            try {
-                BackendConfig backendConfig = KdcUtil.getBackendConfig(hasServer.getConfDir());
-                String backendJar = backendConfig.getString("kdc_identity_backend");
-                if (backendJar.equals("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend")) {
-                    hasServer.configMySQLKdc(backendConfig, realm, port, host, hasServer);
-                } else {
-                    Map<String, String> values = new HashMap<>();
-                    values.put("_HOST_", host);
-                    values.put("_PORT_", String.valueOf(port));
-                    values.put("_REALM_", realm);
-                    hasServer.updateConfFile("kdc.conf", values);
-                    String kdc = "\t\tkdc = " + host + ":" + port;
-                    values.put("_KDCS_", kdc);
-                    values.put("_UDP_LIMIT_", "4096");
-                    hasServer.updateConfFile("krb5.conf", values);
-                }
-            } catch (IOException | HasException | KrbException e) {
-                throw new RuntimeException("Failed to config HAS KDC. ", e);
-            }
-            WebServer.LOG.info("HAS server KDC set successfully.");
-            return Response.status(200).entity("HAS server KDC set successfully.\n").build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HadminApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HadminApi.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HadminApi.java
deleted file mode 100644
index 1b84639..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HadminApi.java
+++ /dev/null
@@ -1,455 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest;
-
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.HasServer;
-import org.apache.hadoop.has.server.admin.LocalHasAdmin;
-import org.apache.hadoop.has.server.web.HostRoleType;
-import org.apache.hadoop.has.server.web.WebServer;
-import org.apache.hadoop.has.server.web.rest.param.HostParam;
-import org.apache.hadoop.has.server.web.rest.param.HostRoleParam;
-import org.apache.hadoop.has.server.web.rest.param.PasswordParam;
-import org.apache.hadoop.has.server.web.rest.param.PrincipalParam;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.BufferedInputStream;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
-
-/**
- * HAS HasAdmin web methods implementation.
- */
-@Path("/admin")
-public class HadminApi {
-
-    @Context
-    private ServletContext context;
-
-    @Context
-    private HttpServletRequest httpRequest;
-
-    private void compressFile(File file, ZipOutputStream out, String basedir) {
-        if (!file.exists()) {
-            return;
-        }
-        try {
-            BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file));
-            ZipEntry entry = new ZipEntry(basedir + file.getName());
-            out.putNextEntry(entry);
-            int count;
-            byte[] data = new byte[8192];
-            while ((count = bis.read(data, 0, 8192)) != -1) {
-                out.write(data, 0, count);
-            }
-            bis.close();
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    /**
-     * @param host Hadoop node
-     * @param role Hadoop role
-     * @return Response
-     */
-    @GET
-    @Path("/exportkeytabs")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response exportKeytabs(@QueryParam(HostParam.NAME) @DefaultValue(HostParam.DEFAULT)
-                                  final HostParam host,
-                                  @QueryParam(HostRoleParam.NAME) @DefaultValue(HostRoleParam.DEFAULT)
-                                  final HostRoleParam role) {
-        if (httpRequest.isSecure()) {
-            WebServer.LOG.info("Request to export keytabs.");
-            LocalHasAdmin hasAdmin = null;
-            HasServer hasServer = null;
-            try {
-                hasServer = WebServer.getHasServerFromContext(context);
-                hasAdmin = new LocalHasAdmin(hasServer);
-            } catch (KrbException e) {
-                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
-            }
-            if (host.getValue() != null) {
-                if (role.getValue() != null) {
-                    try {
-                        File file = hasAdmin.getKeytabByHostAndRole(host.getValue(), role.getValue());
-                        WebServer.LOG.info("Create keytab file for the " + role.getValue()
-                            + " for " + host.getValue());
-                        return Response.ok(file).header("Content-Disposition",
-                            "attachment; filename=" + role.getValue() + "-"
-                                + host.getValue() + ".keytab").build();
-                    } catch (HasException e) {
-                        WebServer.LOG.error("Failed to export keytab File because : " + e.getMessage());
-                    }
-                } else {
-                    //export keytabs zip file
-                    List<File> keytabs = new ArrayList<>();
-                    for (HostRoleType r : HostRoleType.values()) {
-                        try {
-                            keytabs.add(hasAdmin.getKeytabByHostAndRole(host.getValue(), r.getName()));
-                            WebServer.LOG.info("Create keytab file for the " + r.getName()
-                                + " for " + host.getValue());
-                        } catch (HasException e) {
-                            WebServer.LOG.info("Failed to export keytab File because : " + e.getMessage());
-                        }
-                    }
-                    if (keytabs.size() < 1) {
-                        return Response.serverError().build();
-                    }
-                    File path = new File(hasServer.getWorkDir(), "tmp/zip/"
-                        + System.currentTimeMillis());
-                    path.mkdirs();
-                    File keytabZip = new File(path, "keytab.zip");
-                    if (keytabZip.exists()) {
-                        keytabZip.delete();
-                    }
-                    try {
-                        ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(keytabZip));
-                        for (File keytab : keytabs) {
-                            compressFile(keytab, zos, "");
-                        }
-                        zos.close();
-                        WebServer.LOG.info("Success to create the keytab.zip.");
-                        return Response.ok(keytabZip).header("Content-Disposition",
-                            "attachment; filename=keytab.zip").build();
-                    } catch (Exception e) {
-                        WebServer.LOG.error("Failed to create the keytab.zip,because : " + e.getMessage());
-                    }
-                }
-            }
-            return Response.serverError().build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * export single keytab file
-     *
-     * @param principal principal name to export keytab file
-     * @return Response
-     */
-    @GET
-    @Path("/exportkeytab")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response exportKeytab(@QueryParam("principal") final String principal) {
-        if (httpRequest.isSecure()) {
-            LocalHasAdmin hasAdmin = null;
-            WebServer.LOG.info("Exporting keytab file for " + principal + "...");
-            try {
-                HasServer hasServer = WebServer.getHasServerFromContext(context);
-                hasAdmin = new LocalHasAdmin(hasServer);
-            } catch (KrbException e) {
-                WebServer.LOG.error("Failed to create local hadmin." + e.getMessage());
-            }
-            WebServer.LOG.info("Create keytab file for " + principal + " successfully.");
-            if (principal != null) {
-                try {
-                    File path = new File("/tmp/" + System.currentTimeMillis());
-                    if (path.mkdirs()) {
-                        File keytabFile = new File(path, principal + ".keytab");
-                        hasAdmin.exportKeytab(keytabFile, principal);
-                        return Response.ok(keytabFile).header("Content-Disposition", "attachment; filename="
-                            + keytabFile.getName()).build();
-                    }
-                } catch (HasException e) {
-                    WebServer.LOG.error("Failed to export keytab. " + e.toString());
-                    return Response.serverError().build();
-                }
-            }
-            return Response.serverError().build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    @PUT
-    @Path("/setconf")
-    @Produces(MediaType.APPLICATION_JSON)
-    public Response setConf(@QueryParam("isEnable") String isEnable) {
-        if (httpRequest.isSecure()) {
-            WebServer.LOG.info("Request to admin/setconf.");
-            final HasServer hasServer = WebServer.getHasServerFromContext(
-                context);
-            File hasConf = new File(hasServer.getConfDir(), "has-server.conf");
-            if (!hasConf.exists()) {
-                WebServer.LOG.error("has-server.conf is not exists.");
-                return Response.serverError().entity("has-server.conf is not exists.")
-                    .build();
-            }
-            String result = "";
-            if (isEnable.equals("true")) {
-                result = "enable";
-            } else if (isEnable.equals("false")) {
-                result = "disable";
-            } else {
-                WebServer.LOG.error("Value of isEnable is error.");
-                return Response.serverError().entity("Value of isEnable is error.")
-                    .build();
-            }
-            try {
-                HasUtil.setEnableConf(hasConf, isEnable);
-            } catch (Exception e) {
-                WebServer.LOG.error(e.getMessage());
-                return Response.serverError().entity(e.getMessage()).build();
-            }
-            return Response.ok("Set conf to " + result).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    @GET
-    @Path("/getprincipals")
-    @Produces(MediaType.APPLICATION_JSON)
-    public Response getprincipals(@QueryParam("exp") String exp) {
-        if (httpRequest.isSecure()) {
-            WebServer.LOG.info("Request to get principals.");
-            JSONObject result = new JSONObject();
-            String msg;
-            LocalHasAdmin hasAdmin = null;
-            try {
-                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
-            } catch (KrbException e) {
-                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
-            }
-            try {
-                JSONArray principals = new JSONArray();
-                List<String> princList = hasAdmin.getPrincipals(exp);
-                for (String princ : princList) {
-                    principals.put(princ);
-                }
-                WebServer.LOG.info("Success to get principals with JSON.");
-                result.put("result", "success");
-                result.put("msg", principals.toString());
-                return Response.ok(result.toString()).build();
-            } catch (Exception e) {
-                WebServer.LOG.error("Failed to get principals,because : " + e.getMessage());
-                msg = "Failed to get principals,because : " + e.getMessage();
-            }
-            try {
-                result.put("result", "error");
-                result.put("msg", msg);
-            } catch (JSONException e) {
-                WebServer.LOG.error(e.getMessage());
-            }
-            return Response.ok(result.toString()).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Add principal by name and password.
-     *
-     * @param principal principal name.
-     * @param password  principal password
-     * @return Response
-     */
-    @POST
-    @Path("/addprincipal")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response addprincipal(@QueryParam(PrincipalParam.NAME) @DefaultValue(PrincipalParam.DEFAULT)
-                                 final PrincipalParam principal,
-                                 @QueryParam(PasswordParam.NAME) @DefaultValue(PasswordParam.DEFAULT)
-                                 final PasswordParam password) {
-        if (httpRequest.isSecure()) {
-            WebServer.LOG.info("Request to add the principal named " + principal.getValue());
-            LocalHasAdmin hasAdmin = null;
-            try {
-                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
-            } catch (KrbException e) {
-                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
-            }
-            JSONObject result = new JSONObject();
-            String msg = "Add principal successfully.";
-            try {
-                hasAdmin.addPrincipal(principal.getValue(), password.getValue());
-                result.put("result", "success");
-                result.put("msg", msg);
-                return Response.ok(result.toString()).build();
-            } catch (Exception e) {
-                WebServer.LOG.error("Failed to add " + principal + " principal, because: " + e.getMessage());
-                msg = "Failed to add " + principal + " principal, because: " + e.getMessage();
-            }
-            try {
-                result.put("result", "error");
-                result.put("msg", msg);
-            } catch (JSONException e) {
-                WebServer.LOG.error(e.getMessage());
-            }
-            return Response.ok(result.toString()).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    @POST
-    @Path("/renameprincipal")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response renamePrincipal(@QueryParam("oldprincipal") String oldPrincipal,
-                                    @QueryParam("newprincipal") String newPrincipal) {
-        if (httpRequest.isSecure()) {
-            WebServer.LOG.info("Request to rename " + oldPrincipal + " to " + newPrincipal);
-            JSONObject result = new JSONObject();
-            String msg = "Rename principal successfully.";
-            if (oldPrincipal != null && newPrincipal != null) {
-                LocalHasAdmin hasAdmin = null;
-                try {
-                    hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
-                } catch (KrbException e) {
-                    WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
-                }
-                try {
-                    hasAdmin.renamePrincipal(oldPrincipal, newPrincipal);
-                    result.put("result", "success");
-                    result.put("msg", msg);
-                    return Response.ok(result.toString()).build();
-                } catch (Exception e) {
-                    WebServer.LOG.error("Failed to rename principal " + oldPrincipal + " to "
-                        + newPrincipal + ",because: " + e.getMessage());
-                    msg = "Failed to rename principal " + oldPrincipal + " to "
-                        + newPrincipal + ",because: " + e.getMessage();
-                }
-            } else {
-                WebServer.LOG.error("Value of old or new principal is null.");
-                msg = "Value of old or new principal is null.";
-            }
-            try {
-                result.put("result", "error");
-                result.put("msg", msg);
-            } catch (JSONException e) {
-                WebServer.LOG.error(e.getMessage());
-            }
-            return Response.ok(result.toString()).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Delete principal by name.
-     *
-     * @param principal principal like "admin" or "admin@HADOOP.COM".
-     * @return Response
-     */
-    @DELETE
-    @Path("/deleteprincipal")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response deleteprincipal(@QueryParam(PrincipalParam.NAME) @DefaultValue(PrincipalParam.DEFAULT)
-                                    final PrincipalParam principal) {
-        if (httpRequest.isSecure()) {
-            WebServer.LOG.info("Request to delete the principal named " + principal.getValue());
-            JSONObject result = new JSONObject();
-            String msg = "Delete principal successfully.";
-            LocalHasAdmin hasAdmin = null;
-            try {
-                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
-            } catch (KrbException e) {
-                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
-            }
-            try {
-                hasAdmin.deletePrincipal(principal.getValue());
-                result.put("result", "success");
-                result.put("msg", msg);
-                return Response.ok(result.toString()).build();
-            } catch (Exception e) {
-                WebServer.LOG.error("Failed to delete the principal named " + principal.getValue()
-                    + ",because : " + e.getMessage());
-                msg = "Failed to delete the principal named " + principal.getValue()
-                    + ",because : " + e.getMessage();
-            }
-            try {
-                result.put("result", "error");
-                result.put("msg", msg);
-            } catch (JSONException e) {
-                WebServer.LOG.error(e.getMessage());
-            }
-            return Response.ok(result.toString()).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    @PUT
-    @Path("/createprincipals")
-    @Consumes(MediaType.APPLICATION_JSON)
-    @Produces(MediaType.APPLICATION_JSON)
-    public Response createprincipals(@Context HttpServletRequest request) {
-        if (httpRequest.isSecure()) {
-            LocalHasAdmin hasAdmin = null;
-            try {
-                hasAdmin = new LocalHasAdmin(WebServer.getHasServerFromContext(context));
-            } catch (KrbException e) {
-                WebServer.LOG.info("Failed to create local hadmin." + e.getMessage());
-            }
-            JSONObject result = new JSONObject();
-            String msg = "";
-            try {
-                StringBuilder data = new StringBuilder();
-                BufferedReader br = new BufferedReader(new InputStreamReader(request.getInputStream()));
-                String s;
-                while ((s = br.readLine()) != null) {
-                    data.append(s);
-                }
-                WebServer.LOG.info("Request to create principals by JSON : \n" + data.toString());
-                JSONArray hostArray = new JSONObject(data.toString()).optJSONArray("HOSTS");
-                for (int i = 0; i < hostArray.length(); i++) {
-                    JSONObject host = (JSONObject) hostArray.get(i);
-                    String[] roles = host.getString("hostRoles").split(",");
-                    for (String role : roles) {
-                        msg += hasAdmin.addPrincByRole(host.getString("name"), role.toUpperCase());
-                    }
-                }
-                result.put("result", "success");
-                result.put("msg", msg);
-                return Response.ok(result.toString()).build();
-            } catch (Exception e) {
-                WebServer.LOG.error("Failed to create principals,because : " + e.getMessage());
-                msg = "Failed to create principals,because : " + e.getMessage();
-            }
-            try {
-                result.put("result", "error");
-                result.put("msg", msg);
-            } catch (JSONException e) {
-                WebServer.LOG.error(e.getMessage());
-            }
-            return Response.ok(result.toString()).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HasApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HasApi.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HasApi.java
deleted file mode 100644
index a1eb958..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/HasApi.java
+++ /dev/null
@@ -1,336 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.HasAuthenException;
-import org.apache.hadoop.has.server.HasServer;
-import org.apache.hadoop.has.server.HasServerPlugin;
-import org.apache.hadoop.has.server.HasServerPluginRegistry;
-import org.apache.hadoop.has.server.kdc.HasKdcHandler;
-import org.apache.hadoop.has.server.web.HostRoleType;
-import org.apache.hadoop.has.server.web.WebServer;
-import org.apache.hadoop.has.server.web.rest.param.AuthTokenParam;
-import org.apache.hadoop.has.server.web.rest.param.TypeParam;
-import org.apache.hadoop.http.JettyUtils;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.KrbRuntime;
-import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
-import org.apache.kerby.kerberos.kerb.provider.TokenDecoder;
-import org.apache.kerby.kerberos.kerb.server.KdcUtil;
-import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
-import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.GET;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.File;
-import java.io.IOException;
-import java.util.Map;
-import java.util.TreeMap;
-
-/**
- * HAS web methods implementation.
- */
-@Path("")
-public class HasApi {
-
-    @Context
-    private ServletContext context;
-
-    @Context
-    private HttpServletRequest httpRequest;
-
-    /**
-     * Get krb5.conf file.
-     *
-     * @return Response
-     */
-    @GET
-    @Path("/getkrb5conf")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response getKrb5Conf() {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            try {
-                BackendConfig backendConfig = KdcUtil.getBackendConfig(hasServer.getConfDir());
-                String backendJar = backendConfig.getString("kdc_identity_backend");
-                File conf;
-                if (backendJar.equals("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend")) {
-                    conf = hasServer.generateKrb5Conf();
-                } else {
-                    File confDir = hasServer.getConfDir();
-                    conf = new File(confDir, "krb5.conf");
-                }
-                return Response.ok(conf).header("Content-Disposition", "attachment; filename=krb5.conf").build();
-            } catch (KrbException | HasException e) {
-                throw new RuntimeException("Failed to get Krb5.conf. ", e);
-            }
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Get has-client.conf file.
-     *
-     * @return Response
-     */
-    @GET
-    @Path("/gethasconf")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response getHasConf() {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            try {
-                BackendConfig backendConfig = KdcUtil.getBackendConfig(hasServer.getConfDir());
-                String backendJar = backendConfig.getString("kdc_identity_backend");
-                File conf;
-                if (backendJar.equals("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend")) {
-                    conf = hasServer.generateHasConf();
-                } else {
-                    File confDir = hasServer.getConfDir();
-                    conf = new File(confDir, "has-server.conf");
-                }
-                return Response.ok(conf).header("Content-Disposition", "attachment; filename=has-client.conf").build();
-            } catch (IOException | KrbException | HasException e) {
-                throw new RuntimeException("Failed to get has-client.conf. ", e);
-            }
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Get CA file.
-     *
-     * @return Response
-     */
-    @GET
-    @Path("/getcert")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response getCert() {
-        final HasServer hasServer = WebServer.getHasServerFromContext(context);
-        String errMessage = null;
-        File cert = null;
-        try {
-            HasConfig hasConfig = HasUtil.getHasConfig(
-                new File(hasServer.getConfDir(), "has-server.conf"));
-            if (hasConfig != null) {
-                String certPath = hasConfig.getSslClientCert();
-                cert = new File(certPath);
-                if (!cert.exists()) {
-                    errMessage = "Cert file not found in HAS server.";
-                    WebServer.LOG.error("Cert file not found in HAS server.");
-                }
-            } else {
-                errMessage = "has-server.conf not found.";
-                WebServer.LOG.error("has-server.conf not found.");
-            }
-        } catch (HasException e) {
-            errMessage = "Failed to get cert file. " + e.getMessage();
-            WebServer.LOG.error("Failed to get cert file. " + e.getMessage());
-        }
-        if (errMessage == null) {
-            return Response.ok(cert).header("Content-Disposition",
-                "attachment;filename=" + cert.getName()).build();
-        } else {
-            return Response.status(Response.Status.NOT_FOUND).entity(errMessage).build();
-        }
-    }
-
-    @GET
-    @Path("/hostroles")
-    @Produces(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8)
-    public Response getRoles() {
-        if (httpRequest.isSecure()) {
-            JSONArray result = new JSONArray();
-            try {
-                for (HostRoleType role : HostRoleType.values()) {
-                    JSONObject jso = new JSONObject();
-                    jso.put("HostRole", role.getName());
-                    JSONArray jsa = new JSONArray();
-                    String[] princs = role.getPrincs();
-                    for (String princ : princs) {
-                        jsa.put(princ);
-                    }
-                    jso.put("PrincipalNames", jsa);
-                    result.put(jso);
-                }
-                return Response.ok(result.toString() + "\n").type(MediaType.APPLICATION_JSON).build();
-            } catch (Exception e) {
-                WebServer.LOG.error("Failed to get host roles. " + e.getMessage());
-            }
-            return Response.serverError().build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    @GET
-    @Path("/kdcinit")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response kdcInit() {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            try {
-                File adminKeytab = hasServer.initKdcServer();
-                return Response.ok(adminKeytab).header("Content-Disposition",
-                    "attachment; filename=" + adminKeytab.getName()).build();
-            } catch (KrbException e) {
-                WebServer.LOG.error("Failed to initialize KDC server. " + e.getMessage());
-            }
-            return Response.serverError().build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    @GET
-    @Path("/kdcstart")
-    @Produces(MediaType.TEXT_PLAIN)
-    public Response kdcStart() {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            JSONObject result = new JSONObject();
-            String msg = "Succeeded in starting KDC server.";
-            boolean started = true;
-
-            try {
-                hasServer.startKdcServer();
-            } catch (HasException e) {
-                WebServer.LOG.error("Failed to start KDC server. " + e.getMessage());
-                msg = e.getMessage();
-                started = false;
-            }
-            try {
-                result.put("result", started ? "success" : "error");
-                result.put("msg", msg);
-            } catch (JSONException e) {
-                WebServer.LOG.error(e.getMessage());
-            }
-            return Response.ok(result.toString()).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-
-    /**
-     * Handle HTTP PUT request.
-     */
-    @PUT
-    @Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8,
-        MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8})
-    public Response asRequest(
-        @QueryParam(TypeParam.NAME) @DefaultValue(TypeParam.DEFAULT)
-        final TypeParam type,
-        @QueryParam(AuthTokenParam.NAME) @DefaultValue(AuthTokenParam.DEFAULT)
-        final AuthTokenParam authToken
-    ) {
-        return asRequest(type.getValue(), authToken.getValue());
-    }
-
-    private Response asRequest(String type, String tokenStr) {
-        if (httpRequest.isSecure()) {
-            final HasServer hasServer = WebServer.getHasServerFromContext(context);
-            String errMessage = null;
-            String js = null;
-            ObjectMapper mapper = new ObjectMapper();
-            final Map<String, Object> m = new TreeMap<String, Object>();
-
-            if (hasServer.getKdcServer() == null) {
-                errMessage = "Please start the HAS KDC server.";
-            } else if (tokenStr != null && !tokenStr.isEmpty()) {
-                HasKdcHandler kdcHandler = new HasKdcHandler(hasServer);
-
-                TokenDecoder tokenDecoder = KrbRuntime.getTokenProvider("JWT").createTokenDecoder();
-
-                AuthToken authToken = null;
-                try {
-                    authToken = tokenDecoder.decodeFromString(tokenStr);
-                } catch (IOException e) {
-                    errMessage = "Failed to decode the token string. " + e.getMessage();
-                    WebServer.LOG.error(errMessage);
-                }
-                HasServerPlugin tokenPlugin = null;
-                try {
-                    tokenPlugin = HasServerPluginRegistry.createPlugin(type);
-                } catch (HasException e) {
-                    errMessage = "Failed to get the plugin: " + type + ". " + e.getMessage();
-                    WebServer.LOG.error(errMessage);
-                }
-                AuthToken verifiedAuthToken = null;
-                if (tokenPlugin != null && authToken != null) {
-                    try {
-                        verifiedAuthToken = tokenPlugin.authenticate(authToken);
-                    } catch (HasAuthenException e) {
-                        errMessage = "Failed to verify auth token: " + e.getMessage();
-                        WebServer.LOG.error(errMessage);
-                    }
-                }
-
-                if (verifiedAuthToken != null) {
-                    KrbMessage asRep = kdcHandler.getResponse(verifiedAuthToken,
-                        (String) verifiedAuthToken.getAttributes().get("passPhrase"));
-
-                    Base64 base64 = new Base64(0);
-                    try {
-                        m.put("type", tokenPlugin.getLoginType());
-                        m.put("success", "true");
-                        m.put("krbMessage", base64.encodeToString(asRep.encode()));
-                    } catch (IOException e) {
-                        errMessage = "Failed to encode KrbMessage. " + e.getMessage();
-                        WebServer.LOG.error(errMessage);
-                    }
-
-                }
-            } else {
-                errMessage = "The token string should not be empty.";
-                WebServer.LOG.error(errMessage);
-            }
-
-            if (errMessage != null) {
-                m.put("success", "false");
-                m.put("krbMessage", errMessage);
-            }
-            try {
-                js = mapper.writeValueAsString(m);
-            } catch (JsonProcessingException e) {
-                WebServer.LOG.error("Failed to write values to string. " + e.getMessage());
-            }
-            return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
-        }
-        return Response.status(403).entity("HTTPS required.\n").build();
-    }
-}
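
For illustration only, a minimal client sketch for the GET /getkrb5conf endpoint deleted above (the host, port and the /has/v1 base path are assumptions, and the server certificate is assumed to be already trusted by the client JVM):

    import java.io.InputStream;
    import java.net.URL;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.nio.file.StandardCopyOption;
    import javax.net.ssl.HttpsURLConnection;

    // Hypothetical client; not part of the sources removed in this commit.
    public class Krb5ConfFetcher {
        public static void main(String[] args) throws Exception {
            URL url = new URL("https://has.example.com:8092/has/v1/getkrb5conf");
            HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            try (InputStream in = conn.getInputStream()) {
                // The endpoint returns krb5.conf as an attachment; save it locally.
                Files.copy(in, Paths.get("krb5.conf"), StandardCopyOption.REPLACE_EXISTING);
            }
            conn.disconnect();
        }
    }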

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/AuthTokenParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/AuthTokenParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/AuthTokenParam.java
deleted file mode 100644
index 1df0312..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/AuthTokenParam.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-public class AuthTokenParam extends StringParam {
-  /**
-   * Parameter name.
-   */
-  public static final String NAME = "authToken";
-  /**
-   * Default parameter value.
-   */
-  public static final String DEFAULT = "";
-
-  private static final StringParam.Domain DOMAIN = new StringParam.Domain(NAME, null);
-
-  /**
-   * Constructor.
-   *
-   * @param str a string representation of the parameter value.
-   */
-  public AuthTokenParam(final String str) {
-    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
-  }
-
-  @Override
-  public String getName() {
-    return NAME;
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/EnumParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/EnumParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/EnumParam.java
deleted file mode 100644
index 6852ca7..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/EnumParam.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-import org.apache.hadoop.util.StringUtils;
-
-import java.util.Arrays;
-
-abstract class EnumParam<E extends Enum<E>>
-    extends Param<E, EnumParam.Domain<E>> {
-  EnumParam(final Domain<E> domain, final E value) {
-    super(domain, value);
-  }
-
-  /**
-   * The domain of the parameter.
-   */
-  static final class Domain<E extends Enum<E>> extends Param.Domain<E> {
-    private final Class<E> enumClass;
-
-    Domain(String name, Class<E> enumClass) {
-      super(name);
-      this.enumClass = enumClass;
-    }
-
-    @Override
-    public String getDomain() {
-      return Arrays.asList(enumClass.getEnumConstants()).toString();
-    }
-
-    @Override
-    E parse(String str) {
-      return Enum.valueOf(enumClass, StringUtils.toUpperCase(str));
-    }
-  }
-}
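
EnumParam has no concrete subclass in the removed sources; a hypothetical one that restricts a query parameter to the HostRoleType constants (the class name, parameter name and default are assumptions, sketched only to show how the Domain is wired) would follow the same shape as the StringParam subclasses:

    package org.apache.hadoop.has.server.web.rest.param;

    import org.apache.hadoop.has.server.web.HostRoleType;

    // Hypothetical example; not part of the removed sources.
    public class HostRoleTypeParam extends EnumParam<HostRoleType> {
      /** Parameter name. */
      public static final String NAME = "hostroletype";
      /** Default parameter value. */
      public static final String DEFAULT = "HDFS";

      private static final Domain<HostRoleType> DOMAIN =
          new Domain<HostRoleType>(NAME, HostRoleType.class);

      public HostRoleTypeParam(final String str) {
        // Domain.parse maps the (upper-cased) string to the matching enum
        // constant, or returns null for a null/blank value.
        super(DOMAIN, DOMAIN.parse(NAME, str));
      }

      @Override
      public String getValueString() {
        return value == null ? null : value.toString();
      }

      @Override
      public String getName() {
        return NAME;
      }
    }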

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostParam.java
deleted file mode 100644
index ee66ede..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostParam.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-public class HostParam extends StringParam {
-  /**
-   * Parameter name.
-   */
-  public static final String NAME = "host";
-  /**
-   * Default parameter value.
-   */
-  public static final String DEFAULT = "";
-
-  private static final Domain DOMAIN = new Domain(NAME, null);
-
-  /**
-   * Constructor.
-   *
-   * @param str a string representation of the parameter value.
-   */
-  public HostParam(final String str) {
-    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
-  }
-
-  @Override
-  public String getName() {
-    return NAME;
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostRoleParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostRoleParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostRoleParam.java
deleted file mode 100644
index 07e481f..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/HostRoleParam.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-public class HostRoleParam extends StringParam {
-  /**
-   * Parameter name.
-   */
-  public static final String NAME = "role";
-  /**
-   * Default parameter value.
-   */
-  public static final String DEFAULT = "";
-
-  private static final Domain DOMAIN = new Domain(NAME, null);
-
-  /**
-   * Constructor.
-   *
-   * @param str a string representation of the parameter value.
-   */
-  public HostRoleParam(final String str) {
-    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
-  }
-
-  @Override
-  public String getName() {
-    return NAME;
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/Param.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/Param.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/Param.java
deleted file mode 100644
index 5e43683..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/Param.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-import java.io.UnsupportedEncodingException;
-import java.net.URLEncoder;
-import java.util.Arrays;
-import java.util.Comparator;
-
-/**
- * Base class of parameters.
- */
-public abstract class Param<T, D extends Param.Domain<T>> {
-  static final String NULL = "null";
-
-  static final Comparator<Param<?, ?>> NAME_CMP = new Comparator<Param<?, ?>>() {
-    @Override
-    public int compare(Param<?, ?> left, Param<?, ?> right) {
-      return left.getName().compareTo(right.getName());
-    }
-  };
-
-  /** Convert the parameters to a sorted String.
-   *
-   * @param separator URI parameter separator character
-   * @param parameters parameters to encode into a string
-   * @return the encoded URI string
-   */
-  public static String toSortedString(final String separator,
-                                      final Param<?, ?>... parameters) {
-    Arrays.sort(parameters, NAME_CMP);
-    final StringBuilder b = new StringBuilder();
-    try {
-      for (Param<?, ?> p : parameters) {
-        if (p.getValue() != null) {
-          b.append(separator)
-              .append(URLEncoder.encode(p.getName(), "UTF-8"))
-              .append("=")
-              .append(URLEncoder.encode(p.getValueString(), "UTF-8"));
-        }
-      }
-    } catch (UnsupportedEncodingException e) {
-      // Sane systems know about UTF-8, so this should never happen.
-      throw new RuntimeException(e);
-    }
-    return b.toString();
-  }
-
-  /** The domain of the parameter. */
-  final D domain;
-  /** The actual parameter value. */
-  final T value;
-
-  Param(final D domain, final T value) {
-    this.domain = domain;
-    this.value = value;
-  }
-
-  /** @return the parameter value. */
-  public final T getValue() {
-    return value;
-  }
-
-  /** @return the parameter value as a string */
-  public abstract String getValueString();
-
-  /** @return the parameter name. */
-  public abstract String getName();
-
-  @Override
-  public String toString() {
-    return getName() + "=" + value;
-  }
-
-  /** Base class of parameter domains. */
-  abstract static class Domain<T> {
-    /** Parameter name. */
-    final String paramName;
-
-    Domain(final String paramName) {
-      this.paramName = paramName;
-    }
-
-    /** @return the parameter name. */
-    public final String getParamName() {
-      return paramName;
-    }
-
-    /** @return a string description of the domain of the parameter. */
-    public abstract String getDomain();
-
-    /** @return the parameter value represented by the string. */
-    abstract T parse(String str);
-
-    /** Parse the given string.
-     * @return the parameter value represented by the string.
-     */
-    public final T parse(final String varName, final String str) {
-      try {
-        return str != null && str.trim().length() > 0 ? parse(str) : null;
-      } catch (Exception e) {
-        throw new IllegalArgumentException("Failed to parse \"" + str
-            + "\" for the parameter " + varName
-            + ".  The value must be in the domain " + getDomain(), e);
-      }
-    }
-  }
-}
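
As a quick sanity check of toSortedString (a throwaway sketch, not part of the removed sources): non-null parameters are URL-encoded and emitted in alphabetical order of their names, so combining a TypeParam and an AuthTokenParam gives:

    package org.apache.hadoop.has.server.web.rest.param;

    // Hypothetical demo; not part of the removed sources.
    public final class ParamDemo {
        public static void main(String[] args) {
            String query = Param.toSortedString("&",
                new TypeParam("MySQL"),
                new AuthTokenParam("header.payload.signature"));
            // Non-null values are URL-encoded and sorted by parameter name, so this prints:
            // &authToken=header.payload.signature&type=MySQL
            System.out.println(query);
        }
    }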

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PasswordParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PasswordParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PasswordParam.java
deleted file mode 100644
index 045cc96..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PasswordParam.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-public class PasswordParam extends StringParam {
-  /**
-   * Parameter name.
-   */
-  public static final String NAME = "password";
-  /**
-   * Default parameter value.
-   */
-  public static final String DEFAULT = "";
-
-  private static final Domain DOMAIN = new Domain(NAME, null);
-
-  /**
-   * Constructor.
-   *
-   * @param str a string representation of the parameter value.
-   */
-  public PasswordParam(final String str) {
-    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
-  }
-
-  @Override
-  public String getName() {
-    return NAME;
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PrincipalParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PrincipalParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PrincipalParam.java
deleted file mode 100644
index cabca21..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/PrincipalParam.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-public class PrincipalParam extends StringParam {
-  /**
-   * Parameter name.
-   */
-  public static final String NAME = "principal";
-  /**
-   * Default parameter value.
-   */
-  public static final String DEFAULT = "";
-
-  private static final Domain DOMAIN = new Domain(NAME, null);
-
-  /**
-   * Constructor.
-   *
-   * @param str a string representation of the parameter value.
-   */
-  public PrincipalParam(final String str) {
-    super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
-  }
-
-  @Override
-  public String getName() {
-    return NAME;
-  }
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/StringParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/StringParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/StringParam.java
deleted file mode 100644
index b5eba07..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/StringParam.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-import java.util.regex.Pattern;
-
-/**
- * String parameter.
- */
-abstract class StringParam extends Param<String, StringParam.Domain> {
-  StringParam(final Domain domain, String str) {
-    super(domain, domain.parse(str));
-  }
-
-  /**
-   * @return the parameter value as a string
-   */
-  @Override
-  public String getValueString() {
-    return value;
-  }
-
-  /**
-   * The domain of the parameter.
-   */
-  static final class Domain extends Param.Domain<String> {
-    /**
-     * The pattern defining the domain; null means no restriction.
-     */
-    private final Pattern pattern;
-
-    Domain(final String paramName, final Pattern pattern) {
-      super(paramName);
-      this.pattern = pattern;
-    }
-
-    @Override
-    public String getDomain() {
-      return pattern == null ? "<String>" : pattern.pattern();
-    }
-
-    @Override
-    String parse(String str) {
-      if (str != null && pattern != null) {
-        if (!pattern.matcher(str).matches()) {
-          throw new IllegalArgumentException("Invalid value: \"" + str
-              + "\" does not belong to the domain " + getDomain());
-        }
-      }
-      return str;
-    }
-  }
-}
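
None of the removed StringParam subclasses actually pass a Pattern, so the validating branch of parse never fires; a hypothetical subclass that does constrain its domain (the name and regex are assumptions) would be wired like this:

    package org.apache.hadoop.has.server.web.rest.param;

    import java.util.regex.Pattern;

    // Hypothetical example; not part of the removed sources.
    public class RealmParam extends StringParam {
      /** Parameter name. */
      public static final String NAME = "realm";
      /** Default parameter value. */
      public static final String DEFAULT = "";

      // Only upper-case, dotted realm names such as EXAMPLE.COM are accepted;
      // anything else makes Domain.parse throw IllegalArgumentException.
      private static final Domain DOMAIN =
          new Domain(NAME, Pattern.compile("[A-Z0-9]+(\\.[A-Z0-9]+)*"));

      public RealmParam(final String str) {
        super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
      }

      @Override
      public String getName() {
        return NAME;
      }
    }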

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/TypeParam.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/TypeParam.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/TypeParam.java
deleted file mode 100644
index da208a1..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/param/TypeParam.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server.web.rest.param;
-
-public class TypeParam extends StringParam {
-
-    /**
-     * Parameter name.
-     */
-    public static final String NAME = "type";
-    /**
-     * Default parameter value.
-     */
-    public static final String DEFAULT = "";
-
-    private static final Domain DOMAIN = new Domain(NAME, null);
-
-    /**
-     * Constructor.
-     *
-     * @param str a string representation of the parameter value.
-     */
-    public TypeParam(final String str) {
-        super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-}
-
-
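
TypeParam and AuthTokenParam are the two query parameters consumed by the PUT endpoint in HasApi above; a client-side sketch of that call (host, port, the /has/v1 base path, the plugin type and the token value are all placeholders, and the server certificate is assumed to be trusted) could look like:

    import java.net.URL;
    import java.net.URLEncoder;
    import javax.net.ssl.HttpsURLConnection;

    // Hypothetical client; not part of the removed sources.
    public class AsRequestClient {
        public static void main(String[] args) throws Exception {
            String type = "MySQL";                      // plugin name, bound to TypeParam
            String token = "header.payload.signature";  // JWT string, bound to AuthTokenParam
            URL url = new URL("https://has.example.com:8092/has/v1"
                + "?type=" + URLEncoder.encode(type, "UTF-8")
                + "&authToken=" + URLEncoder.encode(token, "UTF-8"));
            HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
            conn.setRequestMethod("PUT");
            // The JSON body built in asRequest carries "success", "type" and the
            // base64-encoded AS-REP under "krbMessage".
            System.out.println(conn.getResponseCode());
            conn.disconnect();
        }
    }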