You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@directory.apache.org by pl...@apache.org on 2017/11/15 05:12:11 UTC
[06/10] directory-kerby git commit: Add the HAS project to Kerby.
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/kdcinit.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/kdcinit.sh b/has/has-dist/bin/kdcinit.sh
new file mode 100644
index 0000000..f6e30c3
--- /dev/null
+++ b/has/has-dist/bin/kdcinit.sh
@@ -0,0 +1,56 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+CONF_DIR=$1
+APP_MAIN=org.apache.hadoop.has.tool.client.kdcinit.HasInitTool
+
+# Reset HAS_CONF_DIR if CONF_DIR not null
+if [ "$CONF_DIR" != "" ]; then
+ if [ ! -d "$CONF_DIR" ]; then
+ echo "[ERROR] ${CONF_DIR} is not a directory"
+ # Fixed: this script defines no usage() function, so the original call
+ # to "usage" would fail with "command not found"; exit explicitly instead.
+ exit 1
+ fi
+else
+ if [ "$HAS_CONF_DIR" != "" ] && [ -d "$HAS_CONF_DIR" ]; then
+ CONF_DIR=${HAS_CONF_DIR}
+ else
+ echo "[ERROR] HAS_CONF_DIR is null or not a directory"
+ # Fixed: exit with a non-zero status so callers can detect the failure.
+ exit 1
+ fi
+fi
+
+# Load HAS environment variables
+if [ -f "${CONF_DIR}/has-env.sh" ]; then
+ . "${CONF_DIR}/has-env.sh"
+fi
+
+# Get HAS_HOME directory
+bin=`dirname "$0"`
+HAS_HOME=`cd ${bin}/..; pwd`
+cd ${HAS_HOME}
+
+# A bare -D anywhere on the command line enables remote JVM debugging.
+for var in $*; do
+ if [ X"$var" = X"-D" ]; then
+ DEBUG="-Xdebug -Xrunjdwp:transport=dt_socket,address=8001,server=y,suspend=y"
+ fi
+done
+
+echo "[INFO] conf_dir=$CONF_DIR"
+HAS_OPTS="-DHAS_LOGFILE=kdcinit"
+
+java ${DEBUG} -classpath target/lib/*:. ${HAS_OPTS} ${APP_MAIN} ${CONF_DIR}
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/kinit.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/kinit.sh b/has/has-dist/bin/kinit.sh
new file mode 100644
index 0000000..3d605d6
--- /dev/null
+++ b/has/has-dist/bin/kinit.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+APP_MAIN=org.apache.hadoop.has.tool.client.kinit.KinitTool
+
+# Get HAS_HOME directory
+bin=`dirname "$0"`
+HAS_HOME=`cd ${bin}/..; pwd`
+cd ${HAS_HOME}
+
+# Split the command line: a bare -D enables remote JVM debugging, everything
+# else is forwarded to KinitTool. Fixed: iterate "$@" instead of $* so
+# arguments containing whitespace are not re-split by the shell.
+for var in "$@"; do
+ if [ X"$var" = X"-D" ]; then
+ DEBUG="-Xdebug -Xrunjdwp:transport=dt_socket,address=8005,server=y,suspend=y"
+ else
+ args="$args $var"
+ fi
+done
+
+java $DEBUG \
+-classpath target/lib/*:. \
+-DHAS_LOGFILE=kinit \
+${APP_MAIN} $args
+
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/klist.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/klist.sh b/has/has-dist/bin/klist.sh
new file mode 100644
index 0000000..0643ae7
--- /dev/null
+++ b/has/has-dist/bin/klist.sh
@@ -0,0 +1,37 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+APP_MAIN=org.apache.hadoop.has.tool.client.klist.KlistTool
+
+# Get HAS_HOME directory
+bin=`dirname "$0"`
+HAS_HOME=`cd ${bin}/..; pwd`
+cd ${HAS_HOME}
+
+for var in $*; do
+ if [ X"$var" = X"-D" ]; then
+ DEBUG="-Xdebug -Xrunjdwp:transport=dt_socket,address=8006,server=y,suspend=y"
+ else
+ args="$args $var"
+ fi
+done
+
+java $DEBUG \
+-classpath target/lib/*:. \
+-DHAS_LOGFILE=klist \
+${APP_MAIN} $args
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/login-test.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/login-test.sh b/has/has-dist/bin/login-test.sh
new file mode 100644
index 0000000..f26b1df
--- /dev/null
+++ b/has/has-dist/bin/login-test.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+APP_MAIN=org.apache.hadoop.has.tool.client.hclient.HasClientLoginTool
+
+# Get HAS_HOME directory
+bin=`dirname "$0"`
+HAS_HOME=`cd ${bin}/..; pwd`
+cd ${HAS_HOME}
+
+# Collect arguments for the login tool; a bare -D enables remote JVM debugging.
+# Fixed: iterate "$@" instead of $* so arguments containing whitespace are
+# not re-split by the shell.
+for var in "$@"; do
+ if [ X"$var" = X"-D" ]; then
+ DEBUG="-Xdebug -Xrunjdwp:transport=dt_socket,address=8004,server=y,suspend=y"
+ else
+ args="$args $var"
+ fi
+done
+
+java ${DEBUG} -classpath target/lib/*:. ${APP_MAIN} ${args}
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/quick-start.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/quick-start.sh b/has/has-dist/bin/quick-start.sh
new file mode 100644
index 0000000..fbe6812
--- /dev/null
+++ b/has/has-dist/bin/quick-start.sh
@@ -0,0 +1,68 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+BASE_DIR=$(cd `dirname $0`/..; pwd)
+
+# 1. Start HAS server
+echo "Start HAS server..."
+sudo sh $BASE_DIR/bin/start-has.sh $BASE_DIR/conf $BASE_DIR/conf &
+sleep 3s
+cat nohup.log
+
+# 2. Config Backend
+echo "Config Backend..."
+curl -X PUT "http://localhost:8091/has/v1/conf/configKdcBackend?backendType=json&dir=/tmp/has/jsonbackend"
+sleep 2s
+
+# 3. Set Realm
+echo "Set Realm..."
+curl -X PUT "http://localhost:8091/has/v1/conf/setKdcRealm?realm=ALIYUN.COM"
+sleep 2s
+
+# 4. Start HAS
+curl -X GET "http://localhost:8091/has/v1/kdcstart"
+sleep 2s
+
+# 5. Init HAS
+echo "Init HAS..."
+curl -o admin.keytab "http://host:8091/has/v1/kdcinit"
+sleep 2s
+
+# 6. Create Principals
+echo "Create Principals..."
+echo \
+{\
+ HOSTS: [\
+ \{\"name\":\"nn\",\"hostRoles\":\"HDFS,YARN,HBASE,ZOOKEEPER\"\}, \
+ \{\"name\":\"dn1\",\"hostRoles\":\"HDFS,YARN,HBASE,ZOOKEEPER\"\}, \
+ \{\"name\":\"dn2\",\"hostRoles\":\"HDFS,YARN,HBASE,ZOOKEEPER\"\} \
+ ] \
+\} > hosts.txt
+curl -T hosts.txt "http://localhost:8091/has/v1/admin/createprincipals"
+sleep 2s
+
+# 7. Get Host Roles List
+echo "Get host roles list..."
+curl -X GET "http://localhost:8091/has/v1/hostroles"
+sleep 2s
+
+# 8. Export keytab files
+echo "Export keytab files..."
+curl -o nn_keytab.zip "http://localhost:8091/has/v1/admin/exportkeytabs?host=nn"
+curl -o dn1_keytab.zip "http://localhost:8091/has/v1/admin/exportkeytabs?host=dn1"
+curl -o dn2_keytab.zip "http://localhost:8091/has/v1/admin/exportkeytabs?host=dn2"
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/start-has.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/start-has.sh b/has/has-dist/bin/start-has.sh
new file mode 100644
index 0000000..95a6913
--- /dev/null
+++ b/has/has-dist/bin/start-has.sh
@@ -0,0 +1,115 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Print a short usage message for start-has.sh and terminate the script.
+# Called whenever a supplied conf/work directory argument is invalid.
+usage()
+{
+ echo "Usage: sh bin/start-has.sh <conf_dir> <working_dir>"
+ echo " Example:"
+ echo " sh bin/start-has.sh conf work"
+ exit
+}
+
+CONF_DIR=$1
+WORK_DIR=$2
+pid=/tmp/has.pid # Pid file to save pid numbers
+APP_MAIN=org.apache.hadoop.has.server.HasServer
+
+# Reset HAS_CONF_DIR and HAS_WORK_DIR if CONF_DIR or WORK_DIR not null
+if [ "$CONF_DIR" != "" ]; then
+ if [ ! -d "$CONF_DIR" ]; then
+ echo "[ERROR] ${CONF_DIR} is not a directory"
+ usage
+ fi
+else
+ if [ "$HAS_CONF_DIR" != "" ] && [ -d "$HAS_CONF_DIR" ]; then
+ CONF_DIR=${HAS_CONF_DIR}
+ else
+ echo "[ERROR] HAS_CONF_DIR is null or not a directory"
+ # Fixed: exit non-zero so callers (e.g. quick-start.sh) can detect failure.
+ exit 1
+ fi
+fi
+
+# Load HAS environment variables
+if [ -f "${CONF_DIR}/has-env.sh" ]; then
+ . "${CONF_DIR}/has-env.sh"
+fi
+
+if [ "${WORK_DIR}" != "" ]; then
+ if [ ! -d "$WORK_DIR" ]; then
+ echo "[ERROR] ${WORK_DIR} is not a directory"
+ usage
+ fi
+else
+ if [ "$HAS_WORK_DIR" != "" ] && [ -d "$HAS_WORK_DIR" ]; then
+ WORK_DIR=${HAS_WORK_DIR}
+ else
+ echo "[ERROR] HAS_WORK_DIR is null or not a directory"
+ # Fixed: exit non-zero so callers can detect failure.
+ exit 1
+ fi
+fi
+
+# Get HAS_HOME directory
+bin=`dirname "$0"`
+HAS_HOME=`cd ${bin}/..; pwd`
+cd ${HAS_HOME}
+
+# A bare -D anywhere on the command line enables remote JVM debugging.
+for var in $*; do
+ if [ X"$var" = X"-D" ]; then
+ DEBUG="-Xdebug -Xrunjdwp:transport=dt_socket,address=8000,server=y,suspend=n"
+ fi
+done
+args="$CONF_DIR $WORK_DIR"
+
+echo "[INFO] conf_dir=$CONF_DIR"
+echo "[INFO] work_dir=$WORK_DIR"
+
+HAS_OPTS="$HAS_JVM_OPTS -DHAS_LOGFILE=has"
+
+# Print a warning if has servers are already running
+if [ -f ${pid} ]; then
+ active=()
+ while IFS='' read -r p || [ -n "$p" ]; do
+ kill -0 ${p} >/dev/null 2>&1
+ if [ $? -eq 0 ]; then
+ active+=(${p})
+ fi
+ done < "$pid"
+
+ count="${#active[@]}"
+
+ if [ "$count" -gt 0 ]; then
+ echo "[WARN] ${count} instance(s) of HAS server are already running."
+ fi
+fi
+
+echo "Starting HAS server..."
+
+# Start HAS server in the background, discarding its console output.
+java ${DEBUG} -classpath target/lib/*:. ${HAS_OPTS} ${APP_MAIN} -start ${args} > /dev/null 2>&1 &
+
+mypid=$!
+
+# Add mypid to pid file if start successfully
+sleep 3
+if [ "$mypid" -gt 0 ] && kill -0 "$mypid" > /dev/null 2>&1; then
+ echo ${mypid} >> ${pid}
+ echo "[SUCCESS] HAS server (pid: ${mypid}) has been started."
+else
+ echo "[ERROR] Failed to start HAS server."
+ exit 1
+fi
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/bin/stop-has.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/bin/stop-has.sh b/has/has-dist/bin/stop-has.sh
new file mode 100644
index 0000000..6ca414d
--- /dev/null
+++ b/has/has-dist/bin/stop-has.sh
@@ -0,0 +1,75 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+OPERATION=$1
+pid=/tmp/has.pid # Pid file
+
+# Stop a single HAS server process; expects the target pid in ${to_stop}.
+stop()
+{
+ if kill -0 ${to_stop} > /dev/null 2>&1; then
+ echo "Stopping HAS server (pid: ${to_stop})..."
+ kill ${to_stop}
+ sleep 5
+ # Fixed: re-check the process id (${to_stop}). The original tested
+ # ${pid}, which holds the pid *file path* (/tmp/has.pid), so the
+ # liveness check was always invalid and the force-kill never fired.
+ if kill -0 ${to_stop} > /dev/null 2>&1; then
+ echo "[WARN] HAS server still alive after 5 seconds, Trying to kill it by force."
+ kill -9 ${to_stop}
+ else
+ echo "[SUCCESS] HAS server has been stopped."
+ fi
+ else
+ echo "[INFO] Skipping HAS server (pid: ${to_stop}), because it is not running anymore."
+ fi
+}
+
+# Dispatch on the requested operation: "all" stops every recorded server,
+# anything else stops only the most recently started one.
+case ${OPERATION} in
+
+ (all)
+ if [ -f "$pid" ]; then
+ mv ${pid} ${pid}.tmp
+ # Fixed: the original piped "cat ${pid}.tmp" into the loop *and*
+ # redirected stdin from the same file; the redirection wins, making
+ # the cat dead code. Read the file once via the redirection only.
+ while read to_stop; do
+ stop
+ done < ${pid}.tmp
+ rm ${pid}.tmp
+ else
+ echo "[INFO] No HAS server to stop."
+ fi
+ ;;
+
+ (*)
+ if [ -f "$pid" ]; then
+ # Get latest pid number in pid file
+ to_stop=$(tail -n 1 ${pid})
+
+ if [ -z "$to_stop" ]; then
+ rm ${pid} # If $to_stop is null, delete the pid file
+ echo "[INFO] No HAS server to stop."
+ else
+ # Drop the last line (the pid being stopped) from the pid file.
+ sed \$d ${pid} > ${pid}.tmp
+ if [ $(wc -l < ${pid}.tmp) -eq 0 ]; then
+ rm ${pid}.tmp ${pid} # If all stopped, clean up pid files
+ else
+ mv ${pid}.tmp ${pid}
+ fi
+ stop
+ fi
+
+ else
+ echo "[INFO] No HAS server to stop."
+ fi
+ ;;
+esac
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/conf/backend.conf
----------------------------------------------------------------------
diff --git a/has/has-dist/conf/backend.conf b/has/has-dist/conf/backend.conf
new file mode 100644
index 0000000..99c9d0a
--- /dev/null
+++ b/has/has-dist/conf/backend.conf
@@ -0,0 +1,23 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+kdc_identity_backend = org.apache.kerby.kerberos.kdc.identitybackend.JsonIdentityBackend
+backend.json.dir = /tmp/has/jsonbackend
+mysql_url = jdbc:mysql://127.0.0.1:3306/mysqlbackend
+mysql_user = root
+mysql_password = passwd
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/conf/hadmin.conf
----------------------------------------------------------------------
diff --git a/has/has-dist/conf/hadmin.conf b/has/has-dist/conf/hadmin.conf
new file mode 100644
index 0000000..e950aea
--- /dev/null
+++ b/has/has-dist/conf/hadmin.conf
@@ -0,0 +1,6 @@
+[HAS]
+ https_host = plusplus-desktop
+ https_port = 8092
+ admin_keytab = /etc/has/admin.keytab
+ admin_keytab_principal = kadmin/HADOOP.COM@HADOOP.COM
+ filter_auth_type = kerberos
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/conf/has-env.sh
----------------------------------------------------------------------
diff --git a/has/has-dist/conf/has-env.sh b/has/has-dist/conf/has-env.sh
new file mode 100644
index 0000000..d390ee9
--- /dev/null
+++ b/has/has-dist/conf/has-env.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Set HAS environment variables here.
+
+###
+# Specify the JVM options to be used when starting HAS server.
+# These options will be appended to the options specified as HAS_OPTS
+#
+# export HAS_JVM_OPTS=""
+
+# HAS work directory
+#
+# export HAS_WORK_DIR=""
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/conf/has-server.conf
----------------------------------------------------------------------
diff --git a/has/has-dist/conf/has-server.conf b/has/has-dist/conf/has-server.conf
new file mode 100644
index 0000000..5fb801e
--- /dev/null
+++ b/has/has-dist/conf/has-server.conf
@@ -0,0 +1,27 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[HAS]
+ https_host = plusplus-desktop
+ https_port = 8092
+ filter_auth_type = kerberos
+ enable_conf = false
+ ssl_client_cert = /etc/has/cert-signed
+
+[PLUGIN]
+ auth_type = RAM
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/conf/kdc.conf
----------------------------------------------------------------------
diff --git a/has/has-dist/conf/kdc.conf b/has/has-dist/conf/kdc.conf
new file mode 100644
index 0000000..a2132ac
--- /dev/null
+++ b/has/has-dist/conf/kdc.conf
@@ -0,0 +1,23 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[kdcdefaults]
+ kdc_host = plusplus-desktop
+ kdc_udp_port = 88
+ kdc_tcp_port = 88
+ kdc_realm = HADOOP.COM
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/conf/krb5.conf
----------------------------------------------------------------------
diff --git a/has/has-dist/conf/krb5.conf b/has/has-dist/conf/krb5.conf
new file mode 100644
index 0000000..6c64cf5
--- /dev/null
+++ b/has/has-dist/conf/krb5.conf
@@ -0,0 +1,29 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+[libdefaults]
+ kdc_realm = HADOOP.COM
+ default_realm = HADOOP.COM
+ udp_preference_limit = 4096
+ kdc_tcp_port = 88
+ kdc_udp_port = 88
+ ticket_lifetime = 60
+[realms]
+ HADOOP.COM = {
+ kdc = plusplus-desktop:88
+ }
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/log4j.properties
----------------------------------------------------------------------
diff --git a/has/has-dist/log4j.properties b/has/has-dist/log4j.properties
new file mode 100644
index 0000000..1b36fec
--- /dev/null
+++ b/has/has-dist/log4j.properties
@@ -0,0 +1,27 @@
+#############################################################################
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#############################################################################
+log4j.rootLogger=INFO,R
+
+log4j.appender.STDOUT=org.apache.log4j.ConsoleAppender
+log4j.appender.STDOUT.layout=org.apache.log4j.PatternLayout
+log4j.appender.STDOUT.layout.ConversionPattern=Logger-->%5p{%F:%L}-%m%n
+
+log4j.appender.R=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.R.File=logs/${HAS_LOGFILE}.log
+log4j.appender.R.layout=org.apache.log4j.PatternLayout
+log4j.appender.R.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %5p{%F:%L}-%m%n
+
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-dist/pom.xml b/has/has-dist/pom.xml
new file mode 100644
index 0000000..81eccc5
--- /dev/null
+++ b/has/has-dist/pom.xml
@@ -0,0 +1,91 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>has-project</artifactId>
+ <version>1.0.0-SNAPSHOT</version>
+ </parent>
+
+ <artifactId>has-dist</artifactId>
+ <description>HAS dist</description>
+ <name>HAS dist</name>
+
+ <dependencies>
+ <!-- Fixed: has-common was declared twice; the duplicate entry was removed. -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>has-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>has-client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>has-server</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>has-client-tool</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>has-server-tool</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <plugins>
+ <!-- Copy all runtime dependencies into target/lib so the bin scripts
+ can use a target/lib/* classpath. -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>copy</id>
+ <phase>package</phase>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>target/lib</outputDirectory>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <!-- Build the distribution archive described by assembly.xml. -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <configuration>
+ <appendAssemblyId>false</appendAssemblyId>
+ <descriptors>
+ <descriptor>assembly.xml</descriptor>
+ </descriptors>
+ </configuration>
+ <executions>
+ <execution>
+ <id>make-assembly</id>
+ <phase>package</phase>
+ <goals>
+ <goal>single</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+
+</project>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/webapps/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/has/has-dist/webapps/WEB-INF/web.xml b/has/has-dist/webapps/WEB-INF/web.xml
new file mode 100644
index 0000000..b13cb1f
--- /dev/null
+++ b/has/has-dist/webapps/WEB-INF/web.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License. See accompanying LICENSE file.
+-->
+<web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee">
+
+</web-app>
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-dist/webapps/has/index.html
----------------------------------------------------------------------
diff --git a/has/has-dist/webapps/has/index.html b/has/has-dist/webapps/has/index.html
new file mode 100644
index 0000000..6f80950
--- /dev/null
+++ b/has/has-dist/webapps/has/index.html
@@ -0,0 +1,24 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<!-- Fixed: the original <meta http-equiv="REFRESH" charset="UTF-8"/> was
+ malformed — http-equiv="REFRESH" requires a content attribute, and
+ charset is not a valid attribute on an http-equiv meta in XHTML 1.0.
+ Declare the document charset in the XHTML-conformant way instead. -->
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
+<title>HAS Administration</title>
+</head>
+</html>
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-plugins/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-plugins/pom.xml b/has/has-plugins/pom.xml
new file mode 100644
index 0000000..3cdc12a
--- /dev/null
+++ b/has/has-plugins/pom.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <parent>
+ <artifactId>has-project</artifactId>
+ <groupId>org.apache.hadoop</groupId>
+ <version>1.0.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+
+ <!-- Authentication plugins for HAS; bundles the Aliyun RAM plugin
+ (see the META-INF/services registrations in this module). -->
+ <artifactId>has-plugins</artifactId>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>has-common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- Aliyun SDK used by the RAM-based auth plugin. -->
+ <dependency>
+ <groupId>com.aliyun</groupId>
+ <artifactId>aliyun-java-sdk-ram</artifactId>
+ <version>2.0.7</version>
+ </dependency>
+ <dependency>
+ <groupId>com.aliyun</groupId>
+ <artifactId>aliyun-java-sdk-core</artifactId>
+ <version>2.2.3</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.12</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>has-client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>has-server</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+
+</project>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.client.HasClientPlugin
----------------------------------------------------------------------
diff --git a/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.client.HasClientPlugin b/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.client.HasClientPlugin
new file mode 100644
index 0000000..9f6edbc
--- /dev/null
+++ b/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.client.HasClientPlugin
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+org.apache.hadoop.has.plugins.client.aliyun.AliyunHasClientPlugin
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.server.HasServerPlugin
----------------------------------------------------------------------
diff --git a/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.server.HasServerPlugin b/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.server.HasServerPlugin
new file mode 100644
index 0000000..fa342e5
--- /dev/null
+++ b/has/has-plugins/src/main/resources/META-INF/services/org.apache.hadoop.has.server.HasServerPlugin
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+org.apache.hadoop.has.plugins.server.aliyun.AliyunHasServerPlugin
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasClientPluginRegistry.java
----------------------------------------------------------------------
diff --git a/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasClientPluginRegistry.java b/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasClientPluginRegistry.java
new file mode 100644
index 0000000..4c60250
--- /dev/null
+++ b/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasClientPluginRegistry.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.plugins;
+
+import org.apache.hadoop.has.client.HasClientPluginRegistry;
+import org.apache.hadoop.has.common.HasException;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.Set;
+
+public class TestHasClientPluginRegistry {
+
+ @Test
+ public void testInit() {
+ Set<String> pluginsNames = HasClientPluginRegistry.registeredPlugins();
+ Assert.assertTrue(pluginsNames.size() > 0);
+ }
+
+ @Test
+ public void testCreatePlugin() throws HasException {
+ Assert.assertTrue(HasClientPluginRegistry.createPlugin("RAM") != null);
+ Set<String> pluginNames = HasClientPluginRegistry.registeredPlugins();
+ for (String name : pluginNames) {
+ HasClientPluginRegistry.createPlugin(name);
+ }
+ }
+}
+
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasServerPluginRegistry.java
----------------------------------------------------------------------
diff --git a/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasServerPluginRegistry.java b/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasServerPluginRegistry.java
new file mode 100644
index 0000000..78f307f
--- /dev/null
+++ b/has/has-plugins/src/test/java/org/apache/hadoop/has/plugins/TestHasServerPluginRegistry.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.plugins;
+
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.HasServerPluginRegistry;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.Set;
+
+public class TestHasServerPluginRegistry {
+
+ @Test
+ public void testInit() {
+ Set<String> pluginsNames = HasServerPluginRegistry.registeredPlugins();
+ Assert.assertTrue(pluginsNames.size() > 0);
+ }
+
+ @Test
+ public void testCreatePlugin() throws HasException {
+ Assert.assertTrue(HasServerPluginRegistry.createPlugin("RAM") != null);
+ Set<String> pluginNames = HasServerPluginRegistry.registeredPlugins();
+ for (String name : pluginNames) {
+ HasServerPluginRegistry.createPlugin(name);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-server/pom.xml b/has/has-server/pom.xml
new file mode 100644
index 0000000..30a4aa8
--- /dev/null
+++ b/has/has-server/pom.xml
@@ -0,0 +1,118 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Licensed to the Apache Software Foundation (ASF) under one
  or more contributor license agreements. See the NOTICE file
  distributed with this work for additional information
  regarding copyright ownership. The ASF licenses this file
  to you under the Apache License, Version 2.0 (the
  "License"); you may not use this file except in compliance
  with the License. You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <parent>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>has-project</artifactId>
    <version>1.0.0-SNAPSHOT</version>
  </parent>

  <artifactId>has-server</artifactId>
  <description>HAS server</description>
  <name>HAS server</name>
  <packaging>jar</packaging>

  <dependencies>
    <dependency>
      <groupId>org.bouncycastle</groupId>
      <artifactId>bcprov-jdk15on</artifactId>
      <version>1.58</version>
    </dependency>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-lang3</artifactId>
      <version>3.5</version>
    </dependency>
    <dependency>
      <groupId>commons-dbutils</groupId>
      <artifactId>commons-dbutils</artifactId>
      <version>1.6</version>
    </dependency>
    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <version>5.1.42</version>
    </dependency>
    <dependency>
      <groupId>com.h2database</groupId>
      <artifactId>h2</artifactId>
      <version>1.4.196</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.kerby</groupId>
      <artifactId>kerby-config</artifactId>
      <version>${kerby.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.kerby</groupId>
      <artifactId>kerb-identity</artifactId>
      <version>${kerby.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.kerby</groupId>
      <artifactId>kerb-core</artifactId>
      <version>${kerby.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.kerby</groupId>
      <artifactId>kerb-server</artifactId>
      <version>${kerby.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.directory.server</groupId>
      <artifactId>apacheds-core-api</artifactId>
      <version>2.0.0-M23</version>
    </dependency>
    <dependency>
      <groupId>org.apache.kerby</groupId>
      <artifactId>kerb-server-api-all</artifactId>
      <version>${kerby.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.kerby</groupId>
      <artifactId>kerby-kdc</artifactId>
      <version>${kerby.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.kerby</groupId>
      <artifactId>json-backend</artifactId>
      <version>${kerby.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.kerby</groupId>
      <artifactId>token-provider</artifactId>
      <version>${kerby.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>3.0.0-alpha2</version>
    </dependency>
    <!-- Aligned with has-plugins (was 4.11) so the reactor uses one junit version. -->
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.12</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>com.sun.jersey</groupId>
      <artifactId>jersey-client</artifactId>
      <version>1.19</version>
    </dependency>
    <!-- FIXME: the "RELEASE" meta-version is non-reproducible (resolves to whatever
         is newest at build time); pin a concrete jersey-common version. -->
    <dependency>
      <groupId>org.glassfish.jersey.core</groupId>
      <artifactId>jersey-common</artifactId>
      <version>RELEASE</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>has-common</artifactId>
      <version>${project.version}</version>
    </dependency>
  </dependencies>

</project>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java
new file mode 100644
index 0000000..b4cd5d6
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server;
+
+import org.apache.kerby.kerberos.kerb.KrbRuntime;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
/**
 * Base class for server-side HAS authentication plugins.
 *
 * <p>Implements the {@code authenticate} template: it creates an empty JWT
 * auth token and delegates validation/population of that token to the
 * concrete plugin via {@link #doAuthenticate}.
 */
public abstract class AbstractHasServerPlugin implements HasServerPlugin {

    public static final Logger LOG = LoggerFactory.getLogger(AbstractHasServerPlugin.class);

    /**
     * Validate the incoming user token and fill in the outgoing auth token.
     *
     * @param userToken the token presented by the client
     * @param authToken the freshly created token to populate on success
     * @throws HasAuthenException if authentication fails
     */
    protected abstract void doAuthenticate(AuthToken userToken, AuthToken authToken)
        throws HasAuthenException;

    /**
     * {@inheritDoc}
     */
    @Override
    public AuthToken authenticate(AuthToken userToken) throws HasAuthenException {

        // Create an empty JWT token; the concrete plugin fills it in.
        AuthToken authToken = KrbRuntime.getTokenProvider("JWT").createTokenFactory().createToken();

        doAuthenticate(userToken, authToken);

        return authToken;
    }

}
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java
new file mode 100644
index 0000000..14df580
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server;
+
+import org.apache.hadoop.has.common.HasException;
+
+public class HasAuthenException extends HasException {
+ private static final long serialVersionUID = 171016915395892939L;
+
+ public HasAuthenException(Throwable cause) {
+ super(cause);
+ }
+
+ public HasAuthenException(String message) {
+ super(message);
+ }
+
+ public HasAuthenException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java
new file mode 100644
index 0000000..cb22b8e
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java
@@ -0,0 +1,701 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ *
+ */
+package org.apache.hadoop.has.server;
+
+import org.apache.commons.dbutils.DbUtils;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.server.web.WebConfigKey;
+import org.apache.hadoop.has.server.web.WebServer;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.kerby.kerberos.kdc.impl.NettyKdcServerImpl;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadmin;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadminImpl;
+import org.apache.kerby.kerberos.kerb.client.ClientUtil;
+import org.apache.kerby.kerberos.kerb.client.KrbConfig;
+import org.apache.kerby.kerberos.kerb.client.KrbSetting;
+import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
+import org.apache.kerby.kerberos.kerb.identity.backend.IdentityBackend;
+import org.apache.kerby.kerberos.kerb.server.KdcServer;
+import org.apache.kerby.kerberos.kerb.server.KdcUtil;
+import org.apache.kerby.util.IOUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Pattern;
+
+/**
+ * The HAS KDC server implementation.
+ */
+public class HasServer {
+ public static final Logger LOG = LoggerFactory.getLogger(HasServer.class);
+
+ private static HasServer server = null;
+
+ private KrbSetting krbSetting;
+ private KdcServer kdcServer;
+ private WebServer webServer;
+ private File confDir;
+ private File workDir;
+ private String kdcHost;
+ private HasConfig hasConfig;
+
    /**
     * Creates a HAS server reading its configuration from the given directory.
     *
     * @param confDir directory holding has-server.conf, kdc.conf, backend.conf, etc.
     * @throws KrbException declared for callers; not thrown by this constructor itself
     */
    public HasServer(File confDir) throws KrbException {
        this.confDir = confDir;
    }

    private void setConfDir(File confDir) {
        this.confDir = confDir;
    }

    public File getConfDir() {
        return confDir;
    }

    public File getWorkDir() {
        return workDir;
    }

    /** Sets the working directory used for keytabs and KDC runtime state. */
    public void setWorkDir(File workDir) {
        this.workDir = workDir;
    }

    /** Sets the host name this KDC is registered under (see configMySQLKdc). */
    public void setKdcHost(String host) {
        this.kdcHost = host;
    }

    public String getKdcHost() {
        return kdcHost;
    }

    /** Returns the krb settings; populated by startKdcServer(). */
    public KrbSetting getKrbSetting() {
        return krbSetting;
    }

    /** Returns the embedded KDC; populated by startKdcServer(). */
    public KdcServer getKdcServer() {
        return kdcServer;
    }

    public WebServer getWebServer() {
        return webServer;
    }

    public void setWebServer(WebServer webServer) {
        this.webServer = webServer;
    }
+
+ public void startKdcServer() throws HasException {
+ BackendConfig backendConfig;
+ try {
+ backendConfig = KdcUtil.getBackendConfig(getConfDir());
+ } catch (KrbException e) {
+ throw new HasException("Failed to get backend config. " + e);
+ }
+ String backendJar = backendConfig.getString("kdc_identity_backend");
+ if (backendJar.equals("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend")) {
+ updateKdcConf();
+ }
+ try {
+ kdcServer = new KdcServer(confDir);
+ } catch (KrbException e) {
+ throw new HasException("Failed to create KdcServer. " + e);
+ }
+ kdcServer.setWorkDir(workDir);
+ kdcServer.setInnerKdcImpl(new NettyKdcServerImpl(kdcServer.getKdcSetting()));
+ try {
+ kdcServer.init();
+ } catch (KrbException e) {
+ LOG.error("Errors occurred when init has kdc server: " + e.getMessage());
+ throw new HasException("Errors occurred when init has kdc server: " + e.getMessage());
+ }
+
+ KrbConfig krbConfig = null;
+ try {
+ krbConfig = ClientUtil.getConfig(confDir);
+ } catch (KrbException e) {
+ new HasException("Errors occurred when getting the config from conf dir. "
+ + e.getMessage());
+ }
+ if (krbConfig == null) {
+ krbConfig = new KrbConfig();
+ }
+ this.krbSetting = new KrbSetting(krbConfig);
+ try {
+ kdcServer.start();
+ } catch (KrbException e) {
+ throw new HasException("Failed to start kdc server. " + e);
+ }
+ try {
+ HasUtil.setEnableConf(new File(confDir, "has-server.conf"), "false");
+ } catch (Exception e) {
+ throw new HasException("Failed to enable conf. " + e);
+ }
+ setHttpFilter();
+ }
+
    /**
     * Export the HTTP service principal's keytab into the work dir and
     * configure the web server's authentication filter to use it.
     *
     * @throws HasException if the principal cannot be created or the keytab exported
     */
    private void setHttpFilter() throws HasException {
        File httpKeytabFile = new File(workDir, "http.keytab");
        LocalKadmin kadmin = new LocalKadminImpl(kdcServer.getKdcSetting(),
            kdcServer.getIdentityService());
        // Ensure the HTTP/<host> principal exists before exporting its keys.
        createHttpPrincipal(kadmin);
        try {
            kadmin.exportKeytab(httpKeytabFile, getHttpPrincipal());
        } catch (KrbException e) {
            throw new HasException("Failed to export keytab: " + e.getMessage());
        }
        // Point the web filter at the exported keytab and principal.
        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE,
            hasConfig.getFilterAuthType());
        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
            getHttpPrincipal());
        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
            httpKeytabFile.getPath());
        webServer.defineFilter();
    }
+
    /**
     * Initialize the KDC: create the built-in principals and export the kadmin
     * keytab into the work dir. The presence of admin.keytab is used as the
     * "already initialized" marker.
     *
     * @return the exported admin keytab file
     * @throws KrbException if already initialized, or creation/export fails
     */
    public File initKdcServer() throws KrbException {
        File adminKeytabFile = new File(workDir, "admin.keytab");
        LocalKadmin kadmin = new LocalKadminImpl(kdcServer.getKdcSetting(),
            kdcServer.getIdentityService());
        if (adminKeytabFile.exists()) {
            throw new KrbException("KDC Server is already inited.");
        }
        kadmin.createBuiltinPrincipals();
        kadmin.exportKeytab(adminKeytabFile, kadmin.getKadminPrincipal());
        System.out.println("The keytab for kadmin principal "
            + " has been exported to the specified file "
            + adminKeytabFile.getAbsolutePath() + ", please safely keep it, "
            + "in order to use kadmin tool later");

        return adminKeytabFile;
    }
+
+ public void createHttpPrincipal(LocalKadmin kadmin) throws HasException {
+ String httpPrincipal = getHttpPrincipal();
+ IdentityBackend backend = kdcServer.getIdentityService();
+ try {
+ if (backend.getIdentity(httpPrincipal) == null) {
+ kadmin.addPrincipal(httpPrincipal);
+ } else {
+ LOG.info("The http principal already exists in backend.");
+ }
+ } catch (KrbException e) {
+ throw new HasException("Failed to add princial, " + e.getMessage());
+ }
+ }
+
+ public String getHttpPrincipal() throws HasException {
+ String realm = kdcServer.getKdcSetting().getKdcRealm();
+ String nameString;
+ try {
+ InetAddress addr = InetAddress.getLocalHost();
+ String fqName = addr.getCanonicalHostName();
+ nameString = "HTTP/" + fqName + "@" + realm;
+ } catch (UnknownHostException e) {
+ throw new HasException(e);
+ }
+ LOG.info("The http principal name is: " + nameString);
+ return nameString;
+ }
+
+ /**
+ * Update conf file.
+ *
+ * @param confName conf file name
+ * @param values customized values
+ * @throws IOException throw IOException
+ * @throws KrbException e
+ */
+ public void updateConfFile(String confName, Map<String, String> values)
+ throws IOException, HasException {
+ File confFile = new File(getConfDir().getAbsolutePath(), confName);
+ if (confFile.exists()) {
+ // Update conf file content
+ InputStream templateResource;
+ if (confName.equals("has-server.conf")) {
+ templateResource = new FileInputStream(confFile);
+ } else {
+ String resourcePath = "/" + confName + ".template";
+ templateResource = getClass().getResourceAsStream(resourcePath);
+ }
+ String content = IOUtil.readInput(templateResource);
+ for (Map.Entry<String, String> entry : values.entrySet()) {
+ content = content.replaceAll(Pattern.quote(entry.getKey()), entry.getValue());
+ }
+
+ // Delete the original conf file
+ boolean delete = confFile.delete();
+ if (!delete) {
+ throw new HasException("Failed to delete conf file: " + confName);
+ }
+
+ // Save the updated conf file
+ IOUtil.writeFile(content, confFile);
+ } else {
+ throw new HasException("Conf file: " + confName + " not found.");
+ }
+ }
+
    /**
     * Get the KDC configuration stored in the MySQL backend's kdc_config table.
     *
     * @return map keyed by the template placeholders "_PORT_" and "_REALM_",
     *         plus the raw comma-separated "servers" list
     * @throws HasException if the backend config cannot be read or the query fails
     */
    private Map<String, String> getKdcConf() throws HasException {
        PreparedStatement preStm = null;
        ResultSet result = null;
        Map<String, String> kdcConf = new HashMap<>();
        BackendConfig backendConfig;
        try {
            backendConfig = KdcUtil.getBackendConfig(getConfDir());
        } catch (KrbException e) {
            throw new HasException("Getting backend config failed." + e.getMessage());
        }
        String driver = backendConfig.getString("mysql_driver");
        String url = backendConfig.getString("mysql_url");
        String user = backendConfig.getString("mysql_user");
        String password = backendConfig.getString("mysql_password");
        Connection connection = startConnection(driver, url, user, password);
        try {

            // Get Kdc configuration from kdc_config table (single row, id = 1)
            String stmKdc = "SELECT * FROM `kdc_config` WHERE id = 1";
            preStm = connection.prepareStatement(stmKdc);
            result = preStm.executeQuery();
            while (result.next()) {
                String realm = result.getString("realm");
                String servers = result.getString("servers");
                String port = String.valueOf(result.getInt("port"));
                kdcConf.put("servers", servers);
                kdcConf.put("_PORT_", port);
                kdcConf.put("_REALM_", realm);
            }

        } catch (SQLException e) {
            LOG.error("Error occurred while getting kdc config.");
            throw new HasException("Failed to get kdc config. ", e);
        } finally {
            // closeQuietly swallows close errors, so the statement-before-resultset
            // order is harmless here.
            DbUtils.closeQuietly(preStm);
            DbUtils.closeQuietly(result);
            DbUtils.closeQuietly(connection);
        }

        return kdcConf;
    }
+
    /**
     * Regenerate kdc.conf from the template using the values stored in MySQL,
     * substituting this server's host for the _HOST_ placeholder.
     *
     * @throws HasException if the config cannot be fetched or the file updated
     */
    private void updateKdcConf() throws HasException {
        try {
            Map<String, String> values = getKdcConf();
            String host = getKdcHost();
            if (host == null) {
                // Fall back to the web server's bind address when no host was set.
                host = getWebServer().getBindAddress().getHostName();
            }
            // "servers" is not a template placeholder; only _HOST_/_PORT_/_REALM_ are.
            values.remove("servers");
            values.put("_HOST_", host);
            updateConfFile("kdc.conf", values);
        } catch (IOException e) {
            throw new HasException("Failed to update kdc config. ", e);
        }
    }
+
+ /**
+ * Start the MySQL connection.
+ *
+ * @param url url of connection
+ * @param user username of connection
+ * @param password password of connection
+ * @throws KrbException e
+ * @return MySQL JDBC connection
+ */
+ private Connection startConnection(String driver, String url, String user,
+ String password) throws HasException {
+ Connection connection;
+ try {
+ Class.forName(driver);
+ connection = DriverManager.getConnection(url, user, password);
+ if (!connection.isClosed()) {
+ LOG.info("Succeeded in connecting to MySQL.");
+ }
+ } catch (ClassNotFoundException e) {
+ throw new HasException("JDBC Driver Class not found. ", e);
+ } catch (SQLException e) {
+ throw new HasException("Failed to connecting to MySQL. ", e);
+ }
+
+ return connection;
+ }
+
+ /**
+ * Config HAS server KDC which have MySQL backend.
+ * @param backendConfig MySQL backend config
+ * @param realm KDC realm to set
+ * @param host KDC host to set
+ * @param hasServer has server to get param
+ * @throws HasException e
+ */
+ public void configMySQLKdc(BackendConfig backendConfig, String realm, int port,
+ String host, HasServer hasServer) throws HasException {
+
+ // Start mysql connection
+ String driver = backendConfig.getString("mysql_driver");
+ String url = backendConfig.getString("mysql_url");
+ String user = backendConfig.getString("mysql_user");
+ String password = backendConfig.getString("mysql_password");
+ Connection connection = startConnection(driver, url, user, password);
+
+ ResultSet resConfig = null;
+ PreparedStatement preStm = null;
+ try {
+ createKdcTable(connection); // Create kdc_config table if not exists
+ String stm = "SELECT * FROM `kdc_config` WHERE id = 1";
+ preStm = connection.prepareStatement(stm);
+ resConfig = preStm.executeQuery();
+ if (!resConfig.next()) {
+ addKdcConfig(connection, realm, port, host);
+ } else {
+ String oldHost = hasServer.getKdcHost();
+ String servers = resConfig.getString("servers");
+ String[] serverArray = servers.split(",");
+ List<String> serverList = new ArrayList<>();
+ Collections.addAll(serverList, serverArray);
+ if (serverList.contains(oldHost)) {
+ servers = servers.replaceAll(oldHost, host);
+ } else {
+ servers = servers + "," + host;
+ }
+ boolean initialized = resConfig.getBoolean("initialized");
+ updateKdcConfig(connection, initialized, port, realm, servers);
+ }
+ hasServer.setKdcHost(host);
+ } catch (SQLException e) {
+ throw new HasException("Failed to config HAS KDC. ", e);
+ } finally {
+ DbUtils.closeQuietly(preStm);
+ DbUtils.closeQuietly(resConfig);
+ DbUtils.closeQuietly(connection);
+ }
+ }
+
    /**
     * Create the single-row kdc_config table in the database if it does not
     * already exist. The CHECK(id=1) + PRIMARY KEY(id) constraints enforce
     * that at most one config row can exist.
     *
     * @param conn database connection
     * @throws HasException if the DDL statement fails
     */
    private void createKdcTable(final Connection conn) throws HasException {
        PreparedStatement preStm = null;
        try {
            String stm = "CREATE TABLE IF NOT EXISTS `kdc_config` ("
                + "port INTEGER DEFAULT 88, servers VARCHAR(255) NOT NULL, "
                + "initialized bool DEFAULT FALSE, realm VARCHAR(255) "
                + "DEFAULT NULL, id INTEGER DEFAULT 1, CHECK (id=1), PRIMARY KEY (id)) "
                + "ENGINE=INNODB;";
            preStm = conn.prepareStatement(stm);
            preStm.executeUpdate();
        } catch (SQLException e) {
            throw new HasException("Failed to create kdc_config table. ", e);
        } finally {
            DbUtils.closeQuietly(preStm);
        }
    }
+
    /**
     * Insert the initial KDC config row (port, servers, realm) into the
     * kdc_config table. The host becomes the first entry of the servers list.
     *
     * @param conn database connection
     * @param realm realm to add
     * @param port port to add
     * @param host host to add (stored in the servers column)
     * @throws HasException if the insert fails
     */
    private void addKdcConfig(Connection conn, String realm, int port, String host)
        throws HasException {
        PreparedStatement preStm = null;
        try {
            String stm = "INSERT INTO `kdc_config` (port, servers, realm)" + " VALUES(?, ?, ?)";
            preStm = conn.prepareStatement(stm);
            preStm.setInt(1, port);
            preStm.setString(2, host);
            preStm.setString(3, realm);
            preStm.executeUpdate();
        } catch (SQLException e) {
            throw new HasException("Failed to insert into kdc_config table. ", e);
        } finally {
            DbUtils.closeQuietly(preStm);
        }
    }
+
    /**
     * Update the KDC config row. Once the cluster is initialized only the
     * server list may change; before that, port and realm are updated too.
     *
     * @param conn database connection
     * @param initialized initial state of KDC config; true freezes port/realm
     * @param port port to update
     * @param realm realm to update
     * @param servers servers to update
     * @throws HasException if the update fails
     */
    private void updateKdcConfig(Connection conn, boolean initialized, int port,
                                 String realm, String servers) throws HasException {
        PreparedStatement preStm = null;
        try {
            if (initialized) {
                // Already initialized: only the server list may be changed.
                String stmUpdate = "UPDATE `kdc_config` SET servers = ? WHERE id = 1";
                preStm = conn.prepareStatement(stmUpdate);
                preStm.setString(1, servers);
                preStm.executeUpdate();
            } else {
                String stmUpdate = "UPDATE `kdc_config` SET port = ?, realm = ?, servers = ? WHERE id = 1";
                preStm = conn.prepareStatement(stmUpdate);
                preStm.setInt(1, port);
                preStm.setString(2, realm);
                preStm.setString(3, servers);
                preStm.executeUpdate();
            }
        } catch (SQLException e) {
            throw new HasException("Failed to update KDC Config. ", e);
        } finally {
            DbUtils.closeQuietly(preStm);
        }
    }
+
+ /**
+ * Read in krb5-template.conf and substitute in the correct port.
+ *
+ * @return krb5 conf file
+ * @throws IOException e
+ * @throws KrbException e
+ */
+ public File generateKrb5Conf() throws HasException {
+ Map<String, String> kdcConf = getKdcConf();
+ String[] servers = kdcConf.get("servers").split(",");
+ int kdcPort = Integer.parseInt(kdcConf.get("_PORT_"));
+ String kdcRealm = kdcConf.get("_REALM_");
+ StringBuilder kdcBuilder = new StringBuilder();
+ for (String server : servers) {
+ String append = "\t\tkdc = " + server.trim() + ":" + kdcPort + "\n";
+ kdcBuilder.append(append);
+ }
+ String kdc = kdcBuilder.toString();
+ kdc = kdc.substring(0, kdc.length() - 1);
+ String resourcePath = "/krb5.conf.template";
+ InputStream templateResource = getClass().getResourceAsStream(resourcePath);
+ String content = null;
+ try {
+ content = IOUtil.readInput(templateResource);
+ } catch (IOException e) {
+ throw new HasException("Read template resource failed. " + e);
+ }
+ content = content.replaceAll("_REALM_", kdcRealm);
+ content = content.replaceAll("_PORT_", String.valueOf(kdcPort));
+ content = content.replaceAll("_UDP_LIMIT_", "4096");
+ content = content.replaceAll("_KDCS_", kdc);
+ File confFile = new File(confDir, "krb5.conf");
+ if (confFile.exists()) {
+ boolean delete = confFile.delete();
+ if (!delete) {
+ throw new HasException("File delete error!");
+ }
+ }
+ try {
+ IOUtil.writeFile(content, confFile);
+ } catch (IOException e) {
+ throw new HasException("Write content to conf file failed. " + e);
+ }
+
+ return confFile;
+ }
+
+ /**
+ * Read in has-server.conf and create has-client.conf.
+ *
+ * @return has conf file
+ * @throws IOException e
+ * @throws HasException e
+ */
+ public File generateHasConf() throws HasException, IOException {
+ Map<String, String> kdcConf = getKdcConf();
+ String servers = kdcConf.get("servers");
+ File confFile = new File(getConfDir().getAbsolutePath(), "has-server.conf");
+ HasConfig hasConfig = HasUtil.getHasConfig(confFile);
+ if (hasConfig != null) {
+ String defaultValue = hasConfig.getHttpsHost();
+ InputStream templateResource = new FileInputStream(confFile);
+ String content = IOUtil.readInput(templateResource);
+ content = content.replaceFirst(Pattern.quote(defaultValue), servers);
+ File hasFile = new File(confDir, "has-client.conf");
+ IOUtil.writeFile(content, hasFile);
+ return hasFile;
+ } else {
+ throw new HasException("has-server.conf not found. ");
+ }
+ }
+
+ public void stopKdcServer() {
+ try {
+ kdcServer.stop();
+ } catch (KrbException e) {
+ LOG.error("Fail to stop has kdc server");
+ }
+ }
+
    /**
     * Start the HAS web (HTTP/HTTPS) server, reading host/port settings from
     * has-server.conf and falling back to WebConfigKey defaults for any value
     * that is absent. After startup the conf filter is registered and the
     * "enable conf" flag is written back to has-server.conf.
     *
     * @throws HasException if has-server.conf is not found in the conf
     *                      directory, or updating the enable-conf flag fails
     */
    public void startWebServer() throws HasException {
        if (webServer == null) {
            HasConfig conf = new HasConfig();

            // Parse has-server.conf to get http_host and http_port
            File confFile = new File(confDir, "has-server.conf");
            hasConfig = HasUtil.getHasConfig(confFile);
            if (hasConfig != null) {
                try {
                    String httpHost;
                    String httpPort;
                    String httpsHost;
                    String httpsPort;
                    // Each of the four address parts falls back to its default
                    // independently when missing from the conf file.
                    if (hasConfig.getHttpHost() != null) {
                        httpHost = hasConfig.getHttpHost();
                    } else {
                        LOG.info("Cannot get the http_host from has-server.conf, using the default http host.");
                        httpHost = WebConfigKey.HAS_HTTP_HOST_DEFAULT;
                    }
                    if (hasConfig.getHttpPort() != null) {
                        httpPort = hasConfig.getHttpPort();
                    } else {
                        LOG.info("Cannot get the http_port from has-server.conf, using the default http port.");
                        httpPort = String.valueOf(WebConfigKey.HAS_HTTP_PORT_DEFAULT);
                    }
                    if (hasConfig.getHttpsHost() != null) {
                        httpsHost = hasConfig.getHttpsHost();
                    } else {
                        LOG.info("Cannot get the https_host from has-server.conf, using the default https host.");
                        httpsHost = WebConfigKey.HAS_HTTPS_HOST_DEFAULT;
                    }
                    if (hasConfig.getHttpsPort() != null) {
                        httpsPort = hasConfig.getHttpsPort();
                    } else {
                        LOG.info("Cannot get the https_port from has-server.conf , using the default https port.");
                        httpsPort = String.valueOf(WebConfigKey.HAS_HTTPS_PORT_DEFAULT);
                    }
                    String hasHttpAddress = httpHost + ":" + httpPort;
                    String hasHttpsAddress = httpsHost + ":" + httpsPort;
                    LOG.info("The web server http address: " + hasHttpAddress);
                    LOG.info("The web server https address: " + hasHttpsAddress);

                    // Serve both plain HTTP and HTTPS.
                    conf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY, hasHttpAddress);
                    conf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY, hasHttpsAddress);
                    conf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY,
                        HttpConfig.Policy.HTTP_AND_HTTPS.name());
                    conf.setString(WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
                        hasConfig.getSslServerConf());
                    webServer = new WebServer(conf);
                } catch (NumberFormatException e) {
                    // NOTE(review): nothing in this try visibly parses a number —
                    // presumably the WebServer constructor does; confirm.
                    throw new IllegalArgumentException("https_port should be a number. "
                        + e.getMessage());
                }
            } else {
                throw new HasException("has-server.conf not found in " + confDir + ". ");
            }
        } else {
            // Web server already created: reuse its configuration.
            hasConfig = webServer.getConf();
        }
        webServer.start();
        webServer.defineConfFilter();
        try {
            // Mark configuration distribution as enabled in has-server.conf.
            HasUtil.setEnableConf(new File(confDir, "has-server.conf"), "true");
        } catch (IOException e) {
            throw new HasException("Errors occurred when enable conf. " + e.getMessage());
        }
        webServer.setWebServerAttribute(this);
    }
+
+ public void stopWebServer() {
+ if (webServer != null) {
+ try {
+ webServer.stop();
+ } catch (Exception e) {
+ LOG.error("Failed to stop http server. " + e.getMessage());
+ }
+ }
+ }
+
+ public static void main(String[] args) {
+ if (args[0].equals("-start")) {
+ String confDirPath = args[1];
+ String workDirPath = args[2];
+ File confDir = new File(confDirPath);
+ File workDir = new File(workDirPath);
+ if (!confDir.exists() || !workDir.exists()) {
+ LOG.error("Invalid or not exist conf-dir or work-dir");
+ System.exit(3);
+ }
+ try {
+ server = new HasServer(confDir);
+ } catch (KrbException e) {
+ LOG.error("Errors occurred when create kdc server: " + e.getMessage());
+ System.exit(4);
+ }
+ server.setConfDir(confDir);
+ server.setWorkDir(workDir);
+ //Only start the webserver, the kdcserver can start after setting the realm
+ try {
+ server.startWebServer();
+ } catch (HasException e) {
+ LOG.error("Errors occurred when start has http server: " + e.getMessage());
+ System.exit(6);
+ }
+
+ if (server.getWebServer().getHttpAddress() != null) {
+ LOG.info("HAS http server started.");
+ LOG.info("host: " + server.getWebServer().getHttpAddress().getHostName());
+ LOG.info("port: " + server.getWebServer().getHttpAddress().getPort());
+ }
+ if (server.getWebServer().getHttpsAddress() != null) {
+ LOG.info("HAS https server started.");
+ LOG.info("host: " + server.getWebServer().getHttpsAddress().getHostName());
+ LOG.info("port: " + server.getWebServer().getHttpsAddress().getPort());
+ }
+ } else if (args[0].equals("-stop")) {
+ if (server != null) {
+ server.stopWebServer();
+ server.stopKdcServer();
+ }
+ } else {
+ System.exit(2);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java
new file mode 100644
index 0000000..6650308
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server;
+
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+
public interface HasServerPlugin {
    /**
     * Get the login module type ID, used to distinguish this module from others.
     * Should correspond to the client side module.
     *
     * @return login type
     */
    String getLoginType();

    /**
     * Perform all the server side authentication logics, the results wrapped in an AuthToken,
     * will be used to exchange a Kerberos ticket.
     *
     * @param userToken user token
     * @return auth token
     * @throws HasAuthenException if server side authentication fails
     */
    AuthToken authenticate(AuthToken userToken) throws HasAuthenException;
}
http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java
new file mode 100644
index 0000000..621b321
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server;
+
+import org.apache.hadoop.has.common.HasException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.ServiceLoader;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class HasServerPluginRegistry {
+ static final Logger LOG = LoggerFactory.getLogger(HasServerPluginRegistry.class);
+
+ private static Map<String, Class> allPlugins = new ConcurrentHashMap<>();
+
+ static {
+ ServiceLoader<HasServerPlugin> plugins = ServiceLoader.load(HasServerPlugin.class);
+
+ for (HasServerPlugin plugin : plugins) {
+ allPlugins.put(plugin.getLoginType(), plugin.getClass());
+ }
+ }
+
+ public static Set<String> registeredPlugins() {
+ return Collections.unmodifiableSet(allPlugins.keySet());
+ }
+
+ public static boolean registeredPlugin(String name) {
+ return allPlugins.containsKey(name);
+ }
+
+ public static HasServerPlugin createPlugin(String name) throws HasException {
+ if (!registeredPlugin(name)) {
+ throw new HasException("Unregistered plugin " + name);
+ }
+ try {
+ HasServerPlugin serverPlugin = (HasServerPlugin) allPlugins.get(name).newInstance();
+ return serverPlugin;
+ } catch (Exception e) {
+ LOG.error("Create {} plugin failed", name, e);
+ throw new HasException(e.getMessage());
+ }
+ }
+}