Posted to commits@sqoop.apache.org by ja...@apache.org on 2015/09/30 17:22:50 UTC

sqoop git commit: SQOOP-910: Sqoop2: Move to embedded jetty from tomcat

Repository: sqoop
Updated Branches:
  refs/heads/sqoop2 2e74a9155 -> c8780d0fe


SQOOP-910: Sqoop2: Move to embedded jetty from tomcat

(Colin Ma via Jarek Jarcec Cecho)


Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/c8780d0f
Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/c8780d0f
Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/c8780d0f

Branch: refs/heads/sqoop2
Commit: c8780d0fe678b352a98456be1ffa6ec6604c0096
Parents: 2e74a91
Author: Jarek Jarcec Cecho <ja...@apache.org>
Authored: Wed Sep 30 08:22:00 2015 -0700
Committer: Jarek Jarcec Cecho <ja...@apache.org>
Committed: Wed Sep 30 08:22:00 2015 -0700

----------------------------------------------------------------------
 .../apache/sqoop/audit/AuditLoggerManager.java  |   4 +-
 .../sqoop/connector/ConnectorManager.java       |  23 +-
 .../java/org/apache/sqoop/core/SqoopServer.java |   6 +-
 .../org/apache/sqoop/driver/JobManager.java     |   4 +-
 dist/pom.xml                                    |  41 ++--
 dist/src/main/bin/sqoop-sys.sh                  |  14 --
 dist/src/main/bin/sqoop.sh                      | 183 +++++++++++---
 dist/src/main/conf/sqoop.properties             | 189 +++++++++++++++
 dist/src/main/conf/sqoop_bootstrap.properties   |  37 +++
 pom.xml                                         |  12 +-
 server/pom.xml                                  |  49 +++-
 .../sqoop/server/SqoopJettyConstants.java       |  65 +++++
 .../apache/sqoop/server/SqoopJettyContext.java  |  61 +++++
 .../apache/sqoop/server/SqoopJettyServer.java   | 125 ++++++++++
 test/pom.xml                                    |   6 +-
 .../providers/SqoopInfrastructureProvider.java  |   4 +-
 .../test/minicluster/JettySqoopMiniCluster.java |  64 +++++
 .../sqoop/test/testcases/ConnectorTestCase.java |   7 +-
 .../sqoop/test/testcases/JettyTestCase.java     | 240 +++++++++++++++++++
 .../sqoop/test/testcases/TomcatTestCase.java    | 239 ------------------
 .../upgrade/DerbyRepositoryUpgradeTest.java     |  15 +-
 tools/pom.xml                                   |  30 ++-
 22 files changed, 1068 insertions(+), 350 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/core/src/main/java/org/apache/sqoop/audit/AuditLoggerManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/sqoop/audit/AuditLoggerManager.java b/core/src/main/java/org/apache/sqoop/audit/AuditLoggerManager.java
index af9282c..5e4adc6 100644
--- a/core/src/main/java/org/apache/sqoop/audit/AuditLoggerManager.java
+++ b/core/src/main/java/org/apache/sqoop/audit/AuditLoggerManager.java
@@ -64,11 +64,13 @@ public class AuditLoggerManager implements Reconfigurable {
   }
 
   private AuditLoggerManager() {
-    loggers = new ArrayList<AuditLogger>();
   }
 
   public synchronized void initialize() {
     LOG.info("Begin audit logger manager initialization");
+    if (loggers == null) {
+      loggers = new ArrayList<AuditLogger>();
+    }
     initializeLoggers();
 
     SqoopConfiguration.getInstance().getProvider()

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/core/src/main/java/org/apache/sqoop/connector/ConnectorManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/sqoop/connector/ConnectorManager.java b/core/src/main/java/org/apache/sqoop/connector/ConnectorManager.java
index b0a6841..285e893 100644
--- a/core/src/main/java/org/apache/sqoop/connector/ConnectorManager.java
+++ b/core/src/main/java/org/apache/sqoop/connector/ConnectorManager.java
@@ -94,11 +94,11 @@ public class ConnectorManager implements Reconfigurable {
   }
 
   // key: connector id, value: connector name
-  private Map<Long, String> idToNameMap = new HashMap<Long, String>();
+  private Map<Long, String> idToNameMap;
   private Set<String> connectorNames = new HashSet<String>();
 
   // key: connector name, value: connector handler
-  private Map<String, ConnectorHandler> handlerMap = new HashMap<String, ConnectorHandler>();
+  private Map<String, ConnectorHandler> handlerMap;
 
   public List<MConnector> getConnectorConfigurables() {
     List<MConnector> connectors = new LinkedList<MConnector>();
@@ -112,10 +112,6 @@ public class ConnectorManager implements Reconfigurable {
     return idToNameMap.keySet();
   }
 
-  public Set<String> getConnectorNames() {
-    return connectorNames;
-  }
-
   public Map<Long, ResourceBundle> getResourceBundles(Locale locale) {
     Map<Long, ResourceBundle> bundles = new HashMap<Long, ResourceBundle>();
     for (ConnectorHandler handler : handlerMap.values()) {
@@ -166,6 +162,16 @@ public class ConnectorManager implements Reconfigurable {
   }
 
   public synchronized void initialize(boolean autoUpgrade) {
+    if (handlerMap == null) {
+      handlerMap = new HashMap<String, ConnectorHandler>();
+    }
+    if (idToNameMap == null) {
+      idToNameMap = new HashMap<Long, String>();
+    }
+    if (connectorNames == null) {
+      connectorNames = new HashSet<String>();
+    }
+
     if (LOG.isTraceEnabled()) {
       LOG.trace("Begin connector manager initialization");
     }
@@ -234,8 +240,9 @@ public class ConnectorManager implements Reconfigurable {
   }
 
   public synchronized void destroy() {
-      handlerMap = null;
-      idToNameMap = null;
+    handlerMap = null;
+    idToNameMap = null;
+    connectorNames = null;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/core/src/main/java/org/apache/sqoop/core/SqoopServer.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/sqoop/core/SqoopServer.java b/core/src/main/java/org/apache/sqoop/core/SqoopServer.java
index 75cea65..555728c 100644
--- a/core/src/main/java/org/apache/sqoop/core/SqoopServer.java
+++ b/core/src/main/java/org/apache/sqoop/core/SqoopServer.java
@@ -49,7 +49,7 @@ public class SqoopServer {
 
   public static void initialize() {
     try {
-      LOG.info("Booting up Sqoop server");
+      LOG.info("Initializing Sqoop server.");
       SqoopConfiguration.getInstance().initialize();
       AuthenticationManager.getInstance().initialize();
       AuthorizationManager.getInstance().initialize();
@@ -58,9 +58,9 @@ public class SqoopServer {
       ConnectorManager.getInstance().initialize();
       Driver.getInstance().initialize();
       JobManager.getInstance().initialize();
-      LOG.info("Sqoop server has successfully boot up");
+      LOG.info("Sqoop server has successfully been initialized.");
     } catch (RuntimeException | ClassNotFoundException | IllegalAccessException | InstantiationException e) {
-      LOG.error("Server startup failure", e);
+      LOG.error("Failure in server initialization", e);
       throw new RuntimeException("Failure in server initialization", e);
     }
   }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/core/src/main/java/org/apache/sqoop/driver/JobManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/sqoop/driver/JobManager.java b/core/src/main/java/org/apache/sqoop/driver/JobManager.java
index de1b976..ebb7efd 100644
--- a/core/src/main/java/org/apache/sqoop/driver/JobManager.java
+++ b/core/src/main/java/org/apache/sqoop/driver/JobManager.java
@@ -138,7 +138,7 @@ public class JobManager implements Reconfigurable {
   /**
    * Synchronization variable between threads.
    */
-  private boolean running = true;
+  private boolean running;
 
   /**
    * Specifies how old submissions should be removed from repository.
@@ -215,7 +215,7 @@ public class JobManager implements Reconfigurable {
   public synchronized void initialize() {
     LOG.trace("Begin submission engine manager initialization");
     MapContext context = SqoopConfiguration.getInstance().getContext();
-
+    running = true;
     // Let's load configured submission engine
     String submissionEngineClassName =
       context.getString(DriverConstants.SYSCFG_SUBMISSION_ENGINE);

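With the server now embedded in the same JVM as the test harness, the core manager singletons touched above (AuditLoggerManager, ConnectorManager, JobManager) build their mutable state lazily inside initialize() and drop it in destroy(), so a later initialize() can start clean. A minimal sketch of that lifecycle pattern, not part of the patch (the ExampleManager name is hypothetical):

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical singleton following the initialize()/destroy() lifecycle used above.
    public class ExampleManager {
      private static final ExampleManager INSTANCE = new ExampleManager();
      private List<String> state;                // no eager field initializer

      private ExampleManager() {}

      public static ExampleManager getInstance() { return INSTANCE; }

      public synchronized void initialize() {
        if (state == null) {                     // lazily (re)create state
          state = new ArrayList<String>();
        }
      }

      public synchronized void destroy() {
        state = null;                            // a subsequent initialize() starts clean
      }
    }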
http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/dist/pom.xml
----------------------------------------------------------------------
diff --git a/dist/pom.xml b/dist/pom.xml
index 37cd5ba..5e324a9 100644
--- a/dist/pom.xml
+++ b/dist/pom.xml
@@ -40,7 +40,6 @@ limitations under the License.
     <dependency>
       <groupId>org.apache.sqoop</groupId>
       <artifactId>sqoop-server</artifactId>
-      <type>war</type>
     </dependency>
     <dependency>
       <groupId>org.apache.sqoop</groupId>
@@ -155,27 +154,13 @@ limitations under the License.
                       </fileset>
                      </copy>
 
-                    <!-- Build server directory -->
-                    <get src="http://archive.apache.org/dist/tomcat/tomcat-${tomcat.major.version}/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz"
-                         dest="target/apache-tomcat.tar.gz" verbose="true" skipexisting="true"/>
-                    <untar src="target/apache-tomcat.tar.gz" dest="target" compression="gzip"/>
-                    <move file="target/apache-tomcat-${tomcat.version}" tofile="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/server"/>
-                    <copy todir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/server/bin">
-                      <fileset dir="src/main/server/bin"/>
-                    </copy>
-                    <chmod dir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/server/bin" perm="u+x" includes="**/*.sh"/>
-                    <copy todir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/server/conf">
-                      <fileset dir="src/main/server/conf"/>
-                    </copy>
-                    <delete dir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/server/webapps"/>
-                    <mkdir dir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/server/webapps"/>
-                    <copy todir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/server/webapps">
-                      <fileset dir="src/main/server/webapps"/>
+                    <copy todir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/server/lib">
+                      <fileset dir="../server/target/lib">
+                        <include name="*.jar" />
+                      </fileset>
                     </copy>
-                    <copy file="../server/target/sqoop.war"
-                      toDir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/server/webapps"/>
-                    <copy file="../tomcat/target/sqoop-tomcat-${project.version}.jar"
-                      toDir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/server/lib"/>
+                    <copy file="../server/target/sqoop-server-${project.version}.jar"
+                            toDir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/server/lib"/>
 
                     <!-- Build shell client directory -->
                     <copy todir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/shell/lib">
@@ -188,6 +173,20 @@ limitations under the License.
                     <copy file="../shell/target/sqoop-shell-${project.version}.jar"
                       toDir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/shell/lib"/>
 
+                    <!-- Build tools client directory -->
+                    <copy todir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/tools/lib">
+                      <fileset dir="../tools/target/lib">
+                        <include name="*.jar" />
+                        </fileset>
+                    </copy>
+                    <copy file="../tools/target/sqoop-tools-${project.version}.jar"
+                          toDir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/tools/lib"/>
+
+                    <!-- Build "conf" directory -->
+                    <copy todir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/conf">
+                        <fileset dir="src/main/conf"/>
+                    </copy>
+
                     <!-- Build "bin" directory -->
                     <copy todir="target/sqoop-${project.version}-bin-hadoop${hadoop.version}/bin">
                       <fileset dir="src/main/bin"/>

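Taken together, the copy targets above suggest the assembled binary distribution now looks roughly like this (layout inferred from this pom, not spelled out in the patch):

    sqoop-<version>-bin-hadoop<version>/
      bin/          sqoop.sh, sqoop-sys.sh
      conf/         sqoop.properties, sqoop_bootstrap.properties
      server/lib/   sqoop-server-<version>.jar plus its runtime dependencies
      shell/lib/    sqoop-shell-<version>.jar plus its runtime dependencies
      tools/lib/    sqoop-tools-<version>.jar plus its runtime dependencies

The Tomcat download and the server/bin, server/conf and server/webapps steps are gone; the server is now a set of jars started directly by sqoop.sh.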
http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/dist/src/main/bin/sqoop-sys.sh
----------------------------------------------------------------------
diff --git a/dist/src/main/bin/sqoop-sys.sh b/dist/src/main/bin/sqoop-sys.sh
index 64dd0bf..194c227 100644
--- a/dist/src/main/bin/sqoop-sys.sh
+++ b/dist/src/main/bin/sqoop-sys.sh
@@ -27,17 +27,3 @@ ECHO="true"
 if [ "${1}" == "-silent" ]; then
   ECHO="false"
 fi
-
-if [ "${SQOOP_HTTP_PORT}" = "" ]; then
-  export SQOOP_HTTP_PORT=12000
-  print "Setting SQOOP_HTTP_PORT:     ${SQOOP_HTTP_PORT}"
-else
-  print "Using   SQOOP_HTTP_PORT:     ${SQOOP_HTTP_PORT}"
-fi
-
-if [ "${SQOOP_ADMIN_PORT}" = "" ]; then
-  export SQOOP_ADMIN_PORT=`expr $SQOOP_HTTP_PORT +  1`
-  print "Setting SQOOP_ADMIN_PORT:     ${SQOOP_ADMIN_PORT}"
-else
-  print "Using   SQOOP_ADMIN_PORT:     ${SQOOP_ADMIN_PORT}"
-fi

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/dist/src/main/bin/sqoop.sh
----------------------------------------------------------------------
diff --git a/dist/src/main/bin/sqoop.sh b/dist/src/main/bin/sqoop.sh
index 707c3fc..1bc046f 100755
--- a/dist/src/main/bin/sqoop.sh
+++ b/dist/src/main/bin/sqoop.sh
@@ -24,6 +24,80 @@ function print_usage(){
   echo ""
 }
 
+function sqoop_server_classpath_set {
+
+  HADOOP_COMMON_HOME=${HADOOP_COMMON_HOME:-${HADOOP_HOME}/share/hadoop/common}
+  HADOOP_HDFS_HOME=${HADOOP_HDFS_HOME:-${HADOOP_HOME}/share/hadoop/hdfs}
+  HADOOP_MAPRED_HOME=${HADOOP_MAPRED_HOME:-${HADOOP_HOME}/share/hadoop/mapreduce}
+  HADOOP_YARN_HOME=${HADOOP_YARN_HOME:-${HADOOP_HOME}/share/hadoop/yarn}
+
+  if [[ ! (-d "${HADOOP_COMMON_HOME}" && -d "${HADOOP_HDFS_HOME}" && -d "${HADOOP_MAPRED_HOME}" && -d "${HADOOP_YARN_HOME}") ]]; then
+    echo "Can't load the Hadoop related java lib, please check the setting for the following environment variables:"
+    echo "    HADOOP_COMMON_HOME, HADOOP_HDFS_HOME, HADOOP_MAPRED_HOME, HADOOP_YARN_HOME"
+    exit
+  fi
+
+  for f in $SQOOP_SERVER_LIB/*.jar; do
+    CLASSPATH="${CLASSPATH}:$f"
+  done
+
+  for f in $HADOOP_COMMON_HOME/*.jar; do
+    CLASSPATH="${CLASSPATH}:$f"
+  done
+
+  for f in $HADOOP_COMMON_HOME/lib/*.jar; do
+    CLASSPATH="${CLASSPATH}:$f"
+  done
+
+  for f in $HADOOP_HDFS_HOME/*.jar; do
+    CLASSPATH="${CLASSPATH}:$f"
+  done
+
+  for f in $HADOOP_HDFS_HOME/lib/*.jar; do
+    CLASSPATH="${CLASSPATH}:$f"
+  done
+
+  for f in $HADOOP_MAPRED_HOME/*.jar; do
+    CLASSPATH="${CLASSPATH}:$f"
+  done
+
+  for f in $HADOOP_MAPRED_HOME/lib/*.jar; do
+    CLASSPATH="${CLASSPATH}:$f"
+  done
+
+  for f in $HADOOP_YARN_HOME/*.jar; do
+    CLASSPATH="${CLASSPATH}:$f"
+  done
+
+  for f in $HADOOP_YARN_HOME/lib/*.jar; do
+    CLASSPATH="${CLASSPATH}:$f"
+  done
+
+  for f in $HIVE_HOME/lib/*.jar; do
+    # exclude the Derby JDBC and Jetty jars to avoid conflicts (e.g. the Derby sealing violation exception)
+    if [[ ! $f =~ derby* && ! $f =~ jetty* ]]; then
+      CLASSPATH="${CLASSPATH}:$f"
+    fi
+  done
+}
+
+function is_sqoop_server_running {
+  if [[ -f "${sqoop_pidfile}" ]]; then
+    kill -s 0 $(cat "$sqoop_pidfile") >/dev/null 2>&1
+    return $?
+  else
+    return 1
+  fi
+}
+
+function sqoop_extra_classpath_set {
+  if [[ -n "${SQOOP_SERVER_EXTRA_LIB}" ]]; then
+    for f in $SQOOP_SERVER_EXTRA_LIB/*.jar; do
+      CLASSPATH="${CLASSPATH}:$f"
+    done
+  fi
+}
+
 if [ $# = 0 ]; then
   print_usage
   exit
@@ -44,25 +118,30 @@ done
 
 BASEDIR=`dirname ${PRG}`
 BASEDIR=`cd ${BASEDIR}/..;pwd`
+SQOOP_IDENT_STRING=${SQOOP_IDENT_STRING:-$USER}
+SQOOP_PID_DIR=${SQOOP_PID_DIR:-/tmp}
+sqoop_pidfile="${SQOOP_PID_DIR}/sqoop-${SQOOP_IDENT_STRING}-jetty-server.pid"
+JAVA_OPTS="$JAVA_OPTS -Dsqoop.config.dir=`dirname $0`/../conf"
 
 echo "Sqoop home directory: ${BASEDIR}"
 
-CATALINA_BIN=${CATALINA_BIN:-${BASEDIR}/server/bin}
-CLIENT_LIB=${CLIENT_LIB:-${BASEDIR}/shell/lib}
-
-setup_catalina_opts() {
-  # The Java System properties 'sqoop.http.port' and 'sqoop.admin.port' are
-  # not used by Sqoop. They are used in Tomcat's server.xml configuration file
-  echo "Using   CATALINA_OPTS:       ${CATALINA_OPTS}"
+SQOOP_CLIENT_LIB=${BASEDIR}/shell/lib
+SQOOP_SERVER_LIB=${BASEDIR}/server/lib
+SQOOP_TOOLS_LIB=${BASEDIR}/tools/lib
 
-  catalina_opts="-Dsqoop.http.port=${SQOOP_HTTP_PORT}";
-  catalina_opts="${catalina_opts} -Dsqoop.admin.port=${SQOOP_ADMIN_PORT}";
-
-  echo "Adding to CATALINA_OPTS:    ${catalina_opts}"
+EXEC_JAVA='java'
+if [ -n "${JAVA_HOME}" ] ; then
+    EXEC_JAVA="${JAVA_HOME}/bin/java"
+fi
 
-  export CATALINA_OPTS="${CATALINA_OPTS} ${catalina_opts}"
-}
+# validate the java command
+${EXEC_JAVA} -version 2>/dev/null
+if [[ $? -gt 0 ]]; then
+  echo "Can't find the path for java, please check the environment setting."
+  exit
+fi
 
+sqoop_extra_classpath_set
 COMMAND=$1
 case $COMMAND in
   tool)
@@ -72,48 +151,82 @@ case $COMMAND in
     fi
 
     source ${BASEDIR}/bin/sqoop-sys.sh
-    setup_catalina_opts
 
     # Remove the "tool" keyword from the command line and pass the rest
     shift
 
-    $CATALINA_BIN/tool-wrapper.sh -server org.apache.sqoop.tomcat.TomcatToolRunner $@
+    # Build class path with full path to each library, including the tools, server and Hadoop related jars
+    for f in $SQOOP_TOOLS_LIB/*.jar; do
+      CLASSPATH="${CLASSPATH}:$f"
+    done
+
+    # Build class path with full path to each library, including hadoop related
+    sqoop_server_classpath_set
+
+    ${EXEC_JAVA} $JAVA_OPTS -classpath ${CLASSPATH} org.apache.sqoop.tools.ToolRunner $@
     ;;
   server)
     if [ $# = 1 ]; then
       echo "Usage: sqoop.sh server <start/stop>"
       exit
     fi
-    actionCmd=$2
 
     source ${BASEDIR}/bin/sqoop-sys.sh
-    setup_catalina_opts
-
-    # There seems to be a bug in catalina.sh whereby catalina.sh doesn't respect
-    # CATALINA_OPTS when stopping the tomcat server. Consequently, we have to hack around
-    # by specifying the CATALINA_OPTS properties in JAVA_OPTS variable
-    if [ "$actionCmd" == "stop" ]; then
-      export JAVA_OPTS="$JAVA_OPTS $CATALINA_OPTS"
-    fi
 
-    # Remove the first 2 command line arguments (server and action command (start/stop)) so we can pass
-    # the rest to catalina.sh script
-    shift
-    shift
-
-    $CATALINA_BIN/catalina.sh $actionCmd "$@"
+    case $2 in
+      start)
+        # check if the Sqoop server has already started.
+        is_sqoop_server_running
+        if [[ $? -eq 0 ]]; then
+          echo "The Sqoop server is already started."
+          exit
+        fi
+
+        # Build class path with full path to each library, including hadoop related
+        sqoop_server_classpath_set
+
+        echo "Starting the Sqoop2 server..."
+        ${EXEC_JAVA} $JAVA_OPTS -classpath ${CLASSPATH} org.apache.sqoop.server.SqoopJettyServer &
+
+        echo $! > "${sqoop_pidfile}" 2>/dev/null
+        if [[ $? -gt 0 ]]; then
+          echo "ERROR:  Cannot write pid ${pidfile}."
+        fi
+
+        # wait 5 seconds, then check if the sqoop server started successfully.
+        sleep 5
+        is_sqoop_server_running
+        if [[ $? -eq 0 ]]; then
+          echo "Sqoop2 server started."
+        fi
+      ;;
+      stop)
+        # check if the Sqoop server has already stopped.
+        is_sqoop_server_running
+        if [[ $? -gt 0 ]]; then
+          echo "No Sqoop server is running."
+          exit
+        fi
+
+        pid=$(cat "$sqoop_pidfile")
+        echo "Stopping the Sqoop2 server..."
+        kill -9 "${pid}" >/dev/null 2>&1
+        rm -f "${sqoop_pidfile}"
+        echo "Sqoop2 server stopped."
+      ;;
+      *)
+        echo "Unknown command, usage: sqoop.sh server <start/stop>"
+        exit
+      ;;
+    esac
     ;;
 
   client)
     # Build class path with full path to each library
-    for f in $CLIENT_LIB/*.jar; do
+    for f in $SQOOP_CLIENT_LIB/*.jar; do
       CLASSPATH="${CLASSPATH}:$f"
     done
 
-    EXEC_JAVA='java'
-    if [ -n "${JAVA_HOME}" ] ; then
-        EXEC_JAVA="${JAVA_HOME}/bin/java"
-    fi
     ${EXEC_JAVA} $JAVA_OPTS -classpath ${CLASSPATH} org.apache.sqoop.shell.SqoopShell $2
     ;;
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/dist/src/main/conf/sqoop.properties
----------------------------------------------------------------------
diff --git a/dist/src/main/conf/sqoop.properties b/dist/src/main/conf/sqoop.properties
new file mode 100755
index 0000000..f077ecc
--- /dev/null
+++ b/dist/src/main/conf/sqoop.properties
@@ -0,0 +1,189 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Sqoop configuration file used by the built in configuration
+# provider: org.apache.sqoop.core.PropertiesConfigurationProvider.
+# This file must reside in the system configuration directory
+# which is specified by the system property "sqoop.config.dir"
+# and must be called sqoop.properties.
+#
+# NOTE: Tokens specified in this file that are marked by a
+# leading and trailing '@' characters should be replaced by
+# their appropriate values. For example, the token @LOGDIR@
+# should be replaced appropriately.
+#
+# The following tokens are used in this configuration file:
+#
+# LOGDIR
+#   The absolute path to the directory where system generated
+#   log files will be kept.
+#
+# BASEDIR
+#   The absolute path to the directory where Sqoop 2 is installed
+#
+
+#
+# Logging Configuration
+# Any property that starts with the prefix
+# org.apache.sqoop.log4j is parsed out by the configuration
+# system and passed to the log4j subsystem. This allows you
+# to specify log4j configuration properties from within the
+# Sqoop configuration.
+#
+org.apache.sqoop.log4j.appender.file=org.apache.log4j.RollingFileAppender
+org.apache.sqoop.log4j.appender.file.File=@LOGDIR@/sqoop.log
+org.apache.sqoop.log4j.appender.file.MaxFileSize=25MB
+org.apache.sqoop.log4j.appender.file.MaxBackupIndex=5
+org.apache.sqoop.log4j.appender.file.layout=org.apache.log4j.PatternLayout
+org.apache.sqoop.log4j.appender.file.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} [%l] %m%n
+org.apache.sqoop.log4j.debug=false
+org.apache.sqoop.log4j.rootCategory=WARN, file
+org.apache.sqoop.log4j.category.org.apache.sqoop=DEBUG
+org.apache.sqoop.log4j.category.org.apache.derby=INFO
+
+#
+# Audit Loggers Configuration
+# Multiple audit loggers could be given here. To specify an
+# audit logger, you should at least add org.apache.sqoop.
+# auditlogger.[LoggerName].class. You could also provide
+# more configuration options by using org.apache.sqoop.
+# auditlogger.[LoggerName] prefix, then all these options
+# are passed to the logger class.
+#
+org.apache.sqoop.auditlogger.default.class=org.apache.sqoop.audit.FileAuditLogger
+org.apache.sqoop.auditlogger.default.file=@LOGDIR@/default.audit
+
+#
+# Repository configuration
+# The Repository subsystem provides the special prefix which
+# is "org.apache.sqoop.repository.sysprop". Any property that
+# is specified with this prefix is parsed out and set as a
+# system property. For example, if the built in Derby repository
+# is being used, the sysprop prefixed properties can be used
+# to affect Derby configuration at startup time by setting
+# the appropriate system properties.
+#
+
+# Repository provider
+org.apache.sqoop.repository.provider=org.apache.sqoop.repository.JdbcRepositoryProvider
+
+# Repository upgrade
+# If set to true, it will not upgrade the Sqoop repository schema; by default it will initiate the upgrade on server start-up
+org.apache.sqoop.repository.schema.immutable=false
+
+# JDBC repository provider configuration
+org.apache.sqoop.repository.jdbc.handler=org.apache.sqoop.repository.derby.DerbyRepositoryHandler
+org.apache.sqoop.repository.jdbc.transaction.isolation=READ_COMMITTED
+org.apache.sqoop.repository.jdbc.maximum.connections=10
+org.apache.sqoop.repository.jdbc.url=jdbc:derby:@BASEDIR@/repository/db;create=true
+org.apache.sqoop.repository.jdbc.driver=org.apache.derby.jdbc.EmbeddedDriver
+org.apache.sqoop.repository.jdbc.user=sa
+org.apache.sqoop.repository.jdbc.password=
+
+# System properties for embedded Derby configuration
+org.apache.sqoop.repository.sysprop.derby.stream.error.file=@LOGDIR@/derbyrepo.log
+
+#
+# Sqoop Connector configuration
+# If set to true will initiate Connectors config upgrade during server startup
+#
+org.apache.sqoop.connector.autoupgrade=false
+
+#
+# Sqoop Driver configuration
+# If set to true will initiate the Driver config upgrade during server startup
+#
+org.apache.sqoop.driver.autoupgrade=false
+
+# Sleeping period for reloading configuration file (once a minute)
+org.apache.sqoop.core.configuration.provider.properties.sleep=60000
+
+#
+# Submission engine configuration
+#
+
+# Submission engine class
+org.apache.sqoop.submission.engine=org.apache.sqoop.submission.mapreduce.MapreduceSubmissionEngine
+
+# Submissions older than this number of milliseconds will be removed, default is one day
+#org.apache.sqoop.submission.purge.threshold=
+
+# Number of milliseconds for purge thread to sleep, by default one day
+#org.apache.sqoop.submission.purge.sleep=
+
+# Number of milliseconds for update thread to sleep, by default 5 minutes
+#org.apache.sqoop.submission.update.sleep=
+
+#
+# Configuration for Mapreduce submission engine (applicable if it's configured)
+#
+
+# Hadoop configuration directory
+org.apache.sqoop.submission.engine.mapreduce.configuration.directory=/etc/hadoop/conf/
+
+#
+# Execution engine configuration
+#
+org.apache.sqoop.execution.engine=org.apache.sqoop.execution.mapreduce.MapreduceExecutionEngine
+
+#
+# Authentication configuration
+#
+#org.apache.sqoop.security.authentication.type=SIMPLE
+#org.apache.sqoop.security.authentication.handler=org.apache.sqoop.security.authentication.SimpleAuthenticationHandler
+#org.apache.sqoop.security.authentication.anonymous=true
+#org.apache.sqoop.security.authentication.type=KERBEROS
+#org.apache.sqoop.security.authentication.handler=org.apache.sqoop.security.authentication.KerberosAuthenticationHandler
+#org.apache.sqoop.security.authentication.kerberos.principal=sqoop/_HOST@NOVALOCAL
+#org.apache.sqoop.security.authentication.kerberos.keytab=/home/kerberos/sqoop.keytab
+#org.apache.sqoop.security.authentication.kerberos.http.principal=HTTP/_HOST@NOVALOCAL
+#org.apache.sqoop.security.authentication.kerberos.http.keytab=/home/kerberos/sqoop.keytab
+#org.apache.sqoop.security.authentication.enable.doAs=true
+#org.apache.sqoop.security.authentication.proxyuser.#USER#.users=*
+#org.apache.sqoop.security.authentication.proxyuser.#USER#.groups=*
+#org.apache.sqoop.security.authentication.proxyuser.#USER#.hosts=*
+
+# Default user, default value is "sqoop.anonymous.user"
+#org.apache.sqoop.security.authentication.default.user=
+
+#
+# Authorization configuration
+#
+#org.apache.sqoop.security.authorization.handler=org.apache.sqoop.security.authorization.DefaultAuthorizationHandler
+#org.apache.sqoop.security.authorization.access_controller=org.apache.sqoop.security.authorization.DefaultAuthorizationAccessController
+#org.apache.sqoop.security.authorization.validator=org.apache.sqoop.security.authorization.DefaultAuthorizationValidator
+#org.apache.sqoop.security.authorization.authentication_provider=org.apache.sqoop.security.authorization.DefaultAuthenticationProvider
+#org.apache.sqoop.security.authorization.server_name=SqoopServer1
+
+# External connectors load path
+# "/path/to/external/connectors/": Add all the connector JARs in the specified folder
+#
+org.apache.sqoop.connector.external.loadpath=
+
+# Sqoop application classpath
+# ":" separated list of jars to be included in sqoop.
+#
+org.apache.sqoop.classpath.extra=
+
+#
+# Jetty Server configuration
+#
+#org.apache.sqoop.jetty.thread.pool.worker.max=500
+#org.apache.sqoop.jetty.thread.pool.worker.min=5
+#org.apache.sqoop.jetty.thread.pool.worker.alive.time=60
+#org.apache.sqoop.jetty.port=12000

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/dist/src/main/conf/sqoop_bootstrap.properties
----------------------------------------------------------------------
diff --git a/dist/src/main/conf/sqoop_bootstrap.properties b/dist/src/main/conf/sqoop_bootstrap.properties
new file mode 100755
index 0000000..d7bfb5e
--- /dev/null
+++ b/dist/src/main/conf/sqoop_bootstrap.properties
@@ -0,0 +1,37 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Bootstrap configuration for Sqoop. This file is picked up
+# from the directory specified by the system property
+# "sqoop.config.dir". Sqoop will not boot up if this property
+# is not set, or if there is no file by the name
+# "sqoop_bootstrap.properties" in the directory pointed by
+# this system property.
+#
+
+#
+# Specifies the configuration provider to be used.
+# This is a required configuration that must be specified.
+# The default value for this is:
+#   org.apache.sqoop.core.PropertiesConfigurationProvider
+# The PropertiesConfigurationProvider expects the system
+# configuration directory to be specified by the system
+# property "sqoop.config.dir", and it must contain a file
+# by the name "sqoop.properties".
+#
+sqoop.config.provider=org.apache.sqoop.core.PropertiesConfigurationProvider
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index ef3f5f4..e438113 100644
--- a/pom.xml
+++ b/pom.xml
@@ -123,6 +123,7 @@ limitations under the License.
     <avro.version>1.7.7</avro.version>
     <jcommander.version>1.27</jcommander.version>
     <findbugs.version>1.3.2</findbugs.version>
+    <jetty.version>9.2.13.v20150730</jetty.version>
   </properties>
 
   <dependencies>
@@ -289,7 +290,6 @@ limitations under the License.
       <dependency>
         <groupId>org.apache.sqoop</groupId>
         <artifactId>sqoop-server</artifactId>
-        <type>war</type>
         <version>${project.version}</version>
       </dependency>
       <dependency>
@@ -656,6 +656,16 @@ limitations under the License.
         <version>${findbugs.version}</version>
         <scope>provided</scope>
       </dependency>
+      <dependency>
+        <groupId>org.eclipse.jetty</groupId>
+        <artifactId>jetty-server</artifactId>
+        <version>${jetty.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.eclipse.jetty</groupId>
+        <artifactId>jetty-servlet</artifactId>
+        <version>${jetty.version}</version>
+      </dependency>
    </dependencies>
   </dependencyManagement>
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/server/pom.xml
----------------------------------------------------------------------
diff --git a/server/pom.xml b/server/pom.xml
index 59663fa..ca068e0 100644
--- a/server/pom.xml
+++ b/server/pom.xml
@@ -30,7 +30,6 @@ limitations under the License.
   <groupId>org.apache.sqoop</groupId>
   <artifactId>sqoop-server</artifactId>
   <name>Sqoop Server</name>
-  <packaging>war</packaging>
 
   <dependencies>
     <dependency>
@@ -114,14 +113,6 @@ limitations under the License.
       <scope>provided</scope>
     </dependency>
 
-<!--
-    <dependency>
-      <groupId>org.apache.sqoop.connector</groupId>
-      <artifactId>sqoop-connector-mysql-jdbc</artifactId>
-      <version>2.0.0-SNAPSHOT</version>
-    </dependency>
--->
-
     <dependency>
       <groupId>javax.servlet</groupId>
       <artifactId>servlet-api</artifactId>
@@ -132,10 +123,44 @@ limitations under the License.
       <groupId>com.googlecode.json-simple</groupId>
       <artifactId>json-simple</artifactId>
     </dependency>
+    <dependency>
+       <groupId>org.eclipse.jetty</groupId>
+       <artifactId>jetty-server</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-servlet</artifactId>
+    </dependency>
   </dependencies>
 
-  <build>
-    <finalName>sqoop</finalName>
-  </build>
+  <profiles>
+    <profile>
+      <id>binary</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-dependency-plugin</artifactId>
+            <executions>
+              <execution>
+                <phase>package</phase>
+                <goals>
+                    <goal>copy-dependencies</goal>
+                </goals>
+                <configuration>
+                    <includeScope>runtime</includeScope>
+                    <excludeScope>test</excludeScope>
+                    <outputDirectory>${project.build.directory}/lib</outputDirectory>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
 
 </project>

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/server/src/main/java/org/apache/sqoop/server/SqoopJettyConstants.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/sqoop/server/SqoopJettyConstants.java b/server/src/main/java/org/apache/sqoop/server/SqoopJettyConstants.java
new file mode 100644
index 0000000..cc2d604
--- /dev/null
+++ b/server/src/main/java/org/apache/sqoop/server/SqoopJettyConstants.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.server;
+
+import org.apache.sqoop.core.ConfigurationConstants;
+
+public class SqoopJettyConstants {
+
+  /**
+   * All Jetty server related configuration is prefixed with this:
+   * <tt>org.apache.sqoop.jetty.</tt>
+   */
+  public static final String PREFIX_JETTY_CONFIG =
+          ConfigurationConstants.PREFIX_GLOBAL_CONFIG + "jetty.";
+
+  /**
+   * Max number of workers in the thread pool, specified by:
+   * <tt>org.apache.sqoop.jetty.thread.pool.worker.max</tt>
+   */
+  public static final String SYSCFG_JETTY_THREAD_POOL_MAX_WORKER = PREFIX_JETTY_CONFIG
+          + "thread.pool.worker.max";
+
+  public static final int DEFAULT_JETTY_THREAD_POOL_MAX_WORKER = 500;
+
+  /**
+   * Min number of workers in the thread pool, specified by:
+   * <tt>org.apache.sqoop.jetty.thread.pool.worker.min</tt>
+   */
+  public static final String SYSCFG_JETTY_THREAD_POOL_MIN_WORKER = PREFIX_JETTY_CONFIG
+          + "thread.pool.worker.min";
+
+  public static final int DEFAULT_JETTY_THREAD_POOL_MIN_WORKER = 5;
+
+  /**
+   * Keep-alive time of workers in the thread pool, specified by:
+   * <tt>org.apache.sqoop.jetty.thread.pool.worker.alive.time</tt>
+   */
+  public static final String SYSCFG_JETTY_THREAD_POOL_WORKER_ALIVE_TIME = PREFIX_JETTY_CONFIG
+          + "thread.pool.worker.alive.time";
+
+  public static final long DEFAULT_JETTY_THREAD_POOL_WORKER_ALIVE_TIME = 60;
+
+  /**
+   * The port number for the Jetty server:
+   * <tt>org.apache.sqoop.jetty.port</tt>
+   */
+  public static final String SYSCFG_JETTY_PORT = PREFIX_JETTY_CONFIG + "port";
+
+  public static final int DEFAULT_SYSCFG_JETTY_PORT = 12000;
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/server/src/main/java/org/apache/sqoop/server/SqoopJettyContext.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/sqoop/server/SqoopJettyContext.java b/server/src/main/java/org/apache/sqoop/server/SqoopJettyContext.java
new file mode 100644
index 0000000..9bb3b97
--- /dev/null
+++ b/server/src/main/java/org/apache/sqoop/server/SqoopJettyContext.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.server;
+
+import org.apache.log4j.Logger;
+import org.apache.sqoop.common.MapContext;
+
+public class SqoopJettyContext {
+  private static final Logger LOG =
+          Logger.getLogger(SqoopJettyContext.class);
+
+  private final MapContext context;
+  private final int minWorkerThreads;
+  private final int maxWorkerThreads;
+  private final int port;
+  private final long workerKeepAliveTime;
+
+  public SqoopJettyContext(MapContext context) {
+    this.context = context;
+    port = context.getInt(SqoopJettyConstants.SYSCFG_JETTY_PORT,
+            SqoopJettyConstants.DEFAULT_SYSCFG_JETTY_PORT);
+    minWorkerThreads = context.getInt(SqoopJettyConstants.SYSCFG_JETTY_THREAD_POOL_MIN_WORKER,
+            SqoopJettyConstants.DEFAULT_JETTY_THREAD_POOL_MIN_WORKER);
+    maxWorkerThreads = context.getInt(SqoopJettyConstants.SYSCFG_JETTY_THREAD_POOL_MAX_WORKER,
+            SqoopJettyConstants.DEFAULT_JETTY_THREAD_POOL_MAX_WORKER);
+    workerKeepAliveTime = context.getLong(SqoopJettyConstants.SYSCFG_JETTY_THREAD_POOL_WORKER_ALIVE_TIME,
+            SqoopJettyConstants.DEFAULT_JETTY_THREAD_POOL_WORKER_ALIVE_TIME);
+  }
+
+  public int getMinWorkerThreads() {
+    return minWorkerThreads;
+  }
+
+  public int getMaxWorkerThreads() {
+    return maxWorkerThreads;
+  }
+
+  public int getPort() {
+    return port;
+  }
+
+  public long getWorkerKeepAliveTime() {
+    return workerKeepAliveTime;
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/server/src/main/java/org/apache/sqoop/server/SqoopJettyServer.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/sqoop/server/SqoopJettyServer.java b/server/src/main/java/org/apache/sqoop/server/SqoopJettyServer.java
new file mode 100644
index 0000000..3c46b1a
--- /dev/null
+++ b/server/src/main/java/org/apache/sqoop/server/SqoopJettyServer.java
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.server;
+
+import org.apache.log4j.Logger;
+import org.apache.sqoop.core.SqoopConfiguration;
+import org.apache.sqoop.core.SqoopServer;
+import org.apache.sqoop.filter.SqoopAuthenticationFilter;
+import org.apache.sqoop.server.v1.*;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.util.thread.ExecutorThreadPool;
+import org.eclipse.jetty.server.ServerConnector;
+
+import javax.servlet.DispatcherType;
+import java.util.EnumSet;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.SynchronousQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Create an embedded Jetty server to answer HTTP requests. The Sqoop REST API
+ * servlets are registered under the "/sqoop" context path and requests are
+ * served by a worker thread pool sized from SqoopJettyContext.
+ */
+public class SqoopJettyServer {
+  private static final Logger LOG = Logger.getLogger(SqoopJettyServer.class);
+  private Server webServer;
+
+  public SqoopJettyServer() {
+    SqoopServer.initialize();
+    SqoopJettyContext sqoopJettyContext = new SqoopJettyContext(SqoopConfiguration.getInstance().getContext());
+    // Server thread pool
+    // Start with minWorkerThreads, expand till maxWorkerThreads and reject subsequent requests
+    ExecutorService executorService = new ThreadPoolExecutor(sqoopJettyContext.getMinWorkerThreads(),
+            sqoopJettyContext.getMaxWorkerThreads(),
+            sqoopJettyContext.getWorkerKeepAliveTime(), TimeUnit.SECONDS, new SynchronousQueue<Runnable>());
+    ExecutorThreadPool threadPool = new ExecutorThreadPool(executorService);
+    webServer = new Server(threadPool);
+
+    // Connector configs
+    ServerConnector connector = new ServerConnector(webServer);
+    connector.setPort(sqoopJettyContext.getPort());
+    webServer.addConnector(connector);
+    webServer.setHandler(createServletContextHandler());
+  }
+
+  public synchronized void startServer() {
+    try {
+      webServer.start();
+      LOG.info("Started Sqoop Jetty server.");
+    } catch (Exception e) {
+      LOG.error(e.getMessage(), e);
+      throw new RuntimeException("Sqoop server failed to start.", e);
+    }
+  }
+
+  public synchronized void joinServerThread() {
+    try {
+      webServer.join();
+    } catch (InterruptedException ie) {
+      LOG.info("Sqoop Jetty server is interrupted.");
+    }
+  }
+
+  // this method is only for test
+  public synchronized void stopServerForTest() {
+    try {
+      if (webServer != null && webServer.isStarted()) {
+        webServer.stop();
+        SqoopServer.destroy();
+        LOG.info("Stopped Sqoop Jetty server.");
+      }
+    } catch (Exception e) {
+      LOG.error(e.getMessage(), e);
+    }
+  }
+
+  // this method is only for test
+  public String getServerUrl() {
+    return webServer.getURI().toString() + "/";
+  }
+
+  private static ServletContextHandler createServletContextHandler() {
+    ServletContextHandler context = new ServletContextHandler();
+    context.setContextPath("/sqoop");
+    context.addServlet(AuthorizationServlet.class, "/v1/authorization/*");
+    context.addServlet(ConfigurableServlet.class, "/v1/configurable/*");
+    context.addServlet(ConnectorServlet.class, "/v1/connector/*");
+    context.addServlet(DriverServlet.class, "/v1/driver/*");
+    context.addServlet(JobServlet.class, "/v1/job/*");
+    context.addServlet(JobsServlet.class, "/v1/jobs/*");
+    context.addServlet(LinkServlet.class, "/v1/link/*");
+    context.addServlet(LinksServlet.class, "/v1/links/*");
+    context.addServlet(SubmissionsServlet.class, "/v1/submissions/*");
+    context.addServlet(VersionServlet.class, "/version");
+    context.addFilter(SqoopAuthenticationFilter.class, "/*", EnumSet.allOf(DispatcherType.class));
+    return context;
+  }
+
+  public static void main(String[] args) {
+    SqoopJettyServer sqoopJettyServer = new SqoopJettyServer();
+    sqoopJettyServer.startServer();
+    sqoopJettyServer.joinServerThread();
+  }
+}

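Once started, the server answers on the port from org.apache.sqoop.jetty.port (12000 by default) under the /sqoop context path registered above. A minimal sketch of hitting the version endpoint with plain JDK classes, assuming a server running locally on the default port (illustrative only, not part of the patch):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class VersionCheck {
      public static void main(String[] args) throws Exception {
        // VersionServlet is mapped at /version inside the /sqoop context.
        URL url = new URL("http://localhost:12000/sqoop/version");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try (BufferedReader in = new BufferedReader(
                 new InputStreamReader(conn.getInputStream()))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line);            // raw response body from the server
          }
        } finally {
          conn.disconnect();
        }
      }
    }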
http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/test/pom.xml
----------------------------------------------------------------------
diff --git a/test/pom.xml b/test/pom.xml
index 8218477..54d54f4 100644
--- a/test/pom.xml
+++ b/test/pom.xml
@@ -86,6 +86,11 @@ limitations under the License.
     </dependency>
 
     <dependency>
+      <groupId>org.apache.sqoop</groupId>
+      <artifactId>sqoop-server</artifactId>
+    </dependency>
+
+    <dependency>
       <groupId>org.apache.sqoop.connector</groupId>
       <artifactId>sqoop-connector-kafka</artifactId>
     </dependency>
@@ -239,7 +244,6 @@ limitations under the License.
           </execution>
         </executions>
       </plugin>
-
     </plugins>
   </build>
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/test/src/main/java/org/apache/sqoop/test/infrastructure/providers/SqoopInfrastructureProvider.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/sqoop/test/infrastructure/providers/SqoopInfrastructureProvider.java b/test/src/main/java/org/apache/sqoop/test/infrastructure/providers/SqoopInfrastructureProvider.java
index 5b4f595..9b0d521 100644
--- a/test/src/main/java/org/apache/sqoop/test/infrastructure/providers/SqoopInfrastructureProvider.java
+++ b/test/src/main/java/org/apache/sqoop/test/infrastructure/providers/SqoopInfrastructureProvider.java
@@ -19,8 +19,8 @@ package org.apache.sqoop.test.infrastructure.providers;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
+import org.apache.sqoop.test.minicluster.JettySqoopMiniCluster;
 import org.apache.sqoop.test.minicluster.SqoopMiniCluster;
-import org.apache.sqoop.test.minicluster.TomcatSqoopMiniCluster;
 
 /**
  * Sqoop infrastructure provider.
@@ -37,7 +37,7 @@ public class SqoopInfrastructureProvider extends InfrastructureProvider {
   @Override
   public void start() {
     try {
-      instance = new TomcatSqoopMiniCluster(rootPath, hadoopConf);
+      instance = new JettySqoopMiniCluster(rootPath, hadoopConf);
       instance.start();
     } catch (Exception e) {
       LOG.error("Could not start Sqoop mini cluster.", e);

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/test/src/main/java/org/apache/sqoop/test/minicluster/JettySqoopMiniCluster.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/sqoop/test/minicluster/JettySqoopMiniCluster.java b/test/src/main/java/org/apache/sqoop/test/minicluster/JettySqoopMiniCluster.java
new file mode 100644
index 0000000..325a790
--- /dev/null
+++ b/test/src/main/java/org/apache/sqoop/test/minicluster/JettySqoopMiniCluster.java
@@ -0,0 +1,64 @@
+/**
+  * Licensed to the Apache Software Foundation (ASF) under one
+  * or more contributor license agreements.  See the NOTICE file
+  * distributed with this work for additional information
+  * regarding copyright ownership.  The ASF licenses this file
+  * to you under the Apache License, Version 2.0 (the
+  * "License"); you may not use this file except in compliance
+  * with the License.  You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+package org.apache.sqoop.test.minicluster;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
+import org.apache.sqoop.server.SqoopJettyServer;
+
+/**
+* Embedded Jetty Sqoop server mini cluster.
+*
+* This mini cluster will start up an embedded Jetty server.
+*/
+public class JettySqoopMiniCluster extends SqoopMiniCluster {
+
+  private SqoopJettyServer sqoopJettyServer;
+
+  /** {@inheritDoc} */
+  public JettySqoopMiniCluster(String temporaryPath, Configuration configuration) throws Exception {
+    super(temporaryPath, configuration);
+  }
+
+  @Override
+  public void start() throws Exception {
+    prepareTemporaryPath();
+    sqoopJettyServer = new SqoopJettyServer();
+    sqoopJettyServer.startServer();
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void stop() throws Exception {
+    if (sqoopJettyServer != null) {
+      sqoopJettyServer.stopServerForTest();
+    }
+  }
+
+  /**
+  * Return server URL.
+  */
+  @Override
+  public String getServerUrl() {
+    if (sqoopJettyServer != null) {
+      return sqoopJettyServer.getServerUrl();
+    }
+    throw new RuntimeException("Jetty server wasn't started.");
+  }
+}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java b/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
index 4452558..5435ade 100644
--- a/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
+++ b/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
@@ -47,7 +47,8 @@ import org.testng.annotations.BeforeSuite;
  * In addition to pure Tomcat based test case it will also create and initialize
  * the database provider prior every test execution.
  */
-abstract public class ConnectorTestCase extends TomcatTestCase {
+@edu.umd.cs.findbugs.annotations.SuppressWarnings({"MS_PKGPROTECT", "ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD"})
+abstract public class ConnectorTestCase extends JettyTestCase {
 
   private static final Logger LOG = Logger.getLogger(ConnectorTestCase.class);
 
@@ -74,14 +75,14 @@ abstract public class ConnectorTestCase extends TomcatTestCase {
   };
 
   @BeforeSuite(alwaysRun = true)
-  public static void startProvider() throws Exception {
+  public void startProvider() throws Exception {
     provider = DatabaseProviderFactory.getProvider(System.getProperties());
     LOG.info("Starting database provider: " + provider.getClass().getName());
     provider.start();
   }
 
   @AfterSuite(alwaysRun = true)
-  public static void stopProvider() {
+  public void stopProvider() {
     LOG.info("Stopping database provider: " + provider.getClass().getName());
     provider.stop();
   }

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/test/src/main/java/org/apache/sqoop/test/testcases/JettyTestCase.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/sqoop/test/testcases/JettyTestCase.java b/test/src/main/java/org/apache/sqoop/test/testcases/JettyTestCase.java
new file mode 100644
index 0000000..d967aaa
--- /dev/null
+++ b/test/src/main/java/org/apache/sqoop/test/testcases/JettyTestCase.java
@@ -0,0 +1,240 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.test.testcases;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Method;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.log4j.Logger;
+import org.apache.sqoop.client.SqoopClient;
+import org.apache.sqoop.test.asserts.HdfsAsserts;
+import org.apache.sqoop.test.hadoop.HadoopMiniClusterRunner;
+import org.apache.sqoop.test.hadoop.HadoopRunner;
+import org.apache.sqoop.test.hadoop.HadoopRunnerFactory;
+import org.apache.sqoop.test.minicluster.JettySqoopMiniCluster;
+import org.apache.sqoop.test.minicluster.SqoopMiniCluster;
+import org.apache.sqoop.test.utils.HdfsUtils;
+import org.testng.ITest;
+import org.testng.ITestContext;
+import org.testng.annotations.AfterSuite;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.BeforeSuite;
+
+/**
+ * Basic test case that will bootstrap Sqoop server running in embedded Jetty
+ * process.
+ */
+@edu.umd.cs.findbugs.annotations.SuppressWarnings({"MS_PKGPROTECT", "ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD"})
+abstract public class JettyTestCase implements ITest {
+  private static final Logger LOG = Logger.getLogger(JettyTestCase.class);
+
+  public String methodName;
+
+  /**
+   * Temporary base path that will be used for tests.
+   *
+   * By default we look for the sqoop.integration.tmpdir property that is
+   * filled in by Maven. If the test is not started from Maven (e.g. from an IDE)
+   * we pick up the configured java.io.tmpdir value. The last resort is the /tmp/
+   * directory in case no property is set.
+   */
+  protected static final String TMP_PATH_BASE =
+    System.getProperty("sqoop.integration.tmpdir", System.getProperty("java.io.tmpdir", "/tmp")) + "/sqoop-cargo-tests";
+
+  /**
+   * Temporary directory that will be used by the test.
+   *
+   * We will take TMP_PATH_BASE and append the test suite. For example:
+   *
+   * TMP_PATH_BASE/TestConnectorsSuite
+   */
+  private static String tmpPath;
+
+  /**
+   * Hadoop cluster
+   */
+  protected static HadoopRunner hadoopCluster;
+
+  /**
+   * Hadoop client
+   */
+  protected static FileSystem hdfsClient;
+
+  /**
+   * Jetty based Sqoop mini cluster
+   */
+  private static JettySqoopMiniCluster cluster;
+
+  /**
+   * Sqoop client API.
+   */
+  private static SqoopClient client;
+
+  /**
+   * Use the method name as the test name
+   */
+  public String getTestName() {
+    return methodName;
+  }
+
+  @BeforeMethod(alwaysRun = true)
+  public void setMethodName(Method method) throws Exception {
+    methodName = method.getName();
+  }
+
+  @BeforeSuite(alwaysRun = true)
+  public void setupSuite(ITestContext context) throws Exception {
+    tmpPath = HdfsUtils.joinPathFragments(TMP_PATH_BASE, context.getSuite().getName());
+
+    LOG.debug("Temporary Directory: " + getTemporaryPath());
+    FileUtils.deleteDirectory(new File(getTemporaryPath()));
+
+    startHadoop();
+    startSqoop();
+  }
+
+  @AfterSuite(alwaysRun = true)
+  public void tearDownSuite() throws Exception {
+    stopSqoop();
+    stopHadoop();
+  }
+
+  protected void startHadoop() throws Exception {
+    // Start Hadoop Clusters
+    hadoopCluster = HadoopRunnerFactory.getHadoopCluster(System.getProperties(), HadoopMiniClusterRunner.class);
+    hadoopCluster.setTemporaryPath(getTemporaryPath());
+    hadoopCluster.setConfiguration(hadoopCluster.prepareConfiguration(new JobConf()));
+    hadoopCluster.start();
+
+    // Initialize Hdfs Client
+    hdfsClient = FileSystem.get(hadoopCluster.getConfiguration());
+    LOG.debug("HDFS Client: " + hdfsClient);
+  }
+
+  protected void startSqoop() throws Exception {
+    // Start server
+    cluster = createSqoopMiniCluster();
+    cluster.start();
+
+    // Initialize Sqoop Client API
+    client = new SqoopClient(getServerUrl());
+  }
+
+  protected void stopSqoop() throws Exception {
+    cluster.stop();
+  }
+
+  protected void stopHadoop() throws Exception {
+    hadoopCluster.stop();
+  }
+
+  /**
+   * Create Sqoop MiniCluster instance that should be used for this test.
+   *
+   * @return New instance of test mini cluster
+   */
+  public JettySqoopMiniCluster createSqoopMiniCluster() throws Exception {
+    return new JettySqoopMiniCluster(getSqoopMiniClusterTemporaryPath(), hadoopCluster.getConfiguration());
+  }
+
+  /**
+   * Return the SqoopClient configured to talk to the testing server.
+   *
+   * @return Sqoop client bound to the testing server URL
+   */
+  public static SqoopClient getClient() {
+    return client;
+  }
+
+  public static void setClient(SqoopClient sqoopClient) {
+    client = sqoopClient;
+  }
+
+  public static SqoopMiniCluster getCluster() {
+    return cluster;
+  }
+
+  public static void setCluster(JettySqoopMiniCluster sqoopMiniCluster) {
+    cluster = sqoopMiniCluster;
+  }
+
+  public static String getTemporaryPath() {
+    return tmpPath;
+  }
+
+  public static String getSqoopMiniClusterTemporaryPath() {
+    return HdfsUtils.joinPathFragments(getTemporaryPath(), "sqoop-mini-cluster");
+  }
+
+  /**
+   * Return the testing server URL.
+   *
+   * @return URL of the running Sqoop server
+   */
+  public static String getServerUrl() {
+    return cluster.getServerUrl();
+  }
+
+  /**
+   * Return the mapreduce base directory for the current test.
+   *
+   * @return HDFS path used as the mapreduce working directory
+   */
+  public String getMapreduceDirectory() {
+    return HdfsUtils.joinPathFragments(hadoopCluster.getTestDirectory(), getClass().getName(), getTestName());
+  }
+
+  /**
+   * Assert that the execution has generated the following lines.
+   *
+   * As the lines can be spread across multiple files, their ordering does not
+   * matter.
+   *
+   * @param lines Expected lines
+   * @throws IOException
+   */
+  protected void assertTo(String... lines) throws IOException {
+    // TODO(VB): fix this to be not directly dependent on hdfs/MR
+    HdfsAsserts.assertMapreduceOutput(hdfsClient, getMapreduceDirectory(), lines);
+  }
+
+  /**
+   * Verify number of TO files.
+   *
+   * @param expectedFiles Expected number of files
+   */
+  protected void assertToFiles(int expectedFiles) throws IOException {
+    // TODO(VB): fix this to be not directly dependent on hdfs/MR
+    HdfsAsserts.assertMapreduceOutputFiles(hdfsClient, getMapreduceDirectory(), expectedFiles);
+  }
+
+  /**
+   * Create a FROM file with the specified content.
+   *
+   * @param filename Input file name
+   * @param lines Individual lines that should be written into the file
+   * @throws IOException
+   */
+  protected void createFromFile(String filename, String...lines) throws IOException {
+    HdfsUtils.createFile(hdfsClient, HdfsUtils.joinPathFragments(getMapreduceDirectory(), filename), lines);
+  }
+}
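
For reference, a minimal sketch of how an integration test might build on the
new base class; the package, class name, sample data and the elided link/job
setup below are hypothetical and not part of this patch:

    package org.apache.sqoop.integration.example;

    import java.io.IOException;

    import org.apache.sqoop.test.testcases.JettyTestCase;
    import org.testng.annotations.Test;

    /**
     * Hypothetical example; the suite-level hooks in JettyTestCase start the
     * Hadoop mini cluster and the embedded Jetty based Sqoop server before
     * any test method runs.
     */
    public class ExampleJettyIntegrationTest extends JettyTestCase {

      @Test
      public void testRoundTrip() throws IOException {
        // Stage input for the FROM side under the per-test mapreduce directory.
        createFromFile("input-0001", "1,'a'", "2,'b'");

        // ... create links and a job through getClient() and run it here ...

        // Verify the TO side: the expected lines must be present, regardless
        // of how they are split across output files.
        assertToFiles(1);
        assertTo("1,'a'", "2,'b'");
      }
    }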

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java b/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java
deleted file mode 100644
index 666749b..0000000
--- a/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java
+++ /dev/null
@@ -1,239 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sqoop.test.testcases;
-
-import java.io.File;
-import java.io.IOException;
-import java.lang.reflect.Method;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.log4j.Logger;
-import org.apache.sqoop.client.SqoopClient;
-import org.apache.sqoop.test.asserts.HdfsAsserts;
-import org.apache.sqoop.test.hadoop.HadoopMiniClusterRunner;
-import org.apache.sqoop.test.hadoop.HadoopRunner;
-import org.apache.sqoop.test.hadoop.HadoopRunnerFactory;
-import org.apache.sqoop.test.minicluster.SqoopMiniCluster;
-import org.apache.sqoop.test.minicluster.TomcatSqoopMiniCluster;
-import org.apache.sqoop.test.utils.HdfsUtils;
-import org.testng.ITest;
-import org.testng.ITestContext;
-import org.testng.annotations.AfterSuite;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.BeforeSuite;
-
-/**
- * Basic test case that will bootstrap Sqoop server running in external Tomcat
- * process.
- */
-abstract public class TomcatTestCase implements ITest {
-  private static final Logger LOG = Logger.getLogger(TomcatTestCase.class);
-
-  public String methodName;
-
-  /**
-   * Temporary base path that will be used for tests.
-   *
-   * By default we will take a look for sqoop.integration.tmpdir property that is
-   * filled up by maven. If the test is not started from maven (IDE) we will
-   * pick up configured java.io.tmpdir value. The last results is /tmp/ directory
-   * in case that no property is set.
-   */
-  protected static final String TMP_PATH_BASE =
-    System.getProperty("sqoop.integration.tmpdir", System.getProperty("java.io.tmpdir", "/tmp")) + "/sqoop-cargo-tests";
-
-  /**
-   * Temporary directory that will be used by the test.
-   *
-   * We will take TMP_PATH_BASE and append the test suite. For example:
-   *
-   * TMP_PATH_BASE/TestConnectorsSuite
-   */
-  private static String tmpPath;
-
-  /**
-   * Hadoop cluster
-   */
-  protected static HadoopRunner hadoopCluster;
-
-  /**
-   * Hadoop client
-   */
-  protected static FileSystem hdfsClient;
-
-  /**
-   * Tomcat based Sqoop mini cluster
-   */
-  private static TomcatSqoopMiniCluster cluster;
-
-  /**
-   * Sqoop client API.
-   */
-  private static SqoopClient client;
-
-  /**
-   * Use the method name as the test name
-   */
-  public String getTestName() {
-    return methodName;
-  }
-
-  @BeforeMethod(alwaysRun = true)
-  public void setMethodName(Method method) throws Exception {
-    methodName = method.getName();
-  }
-
-  @BeforeSuite(alwaysRun = true)
-  public void setupSuite(ITestContext context) throws Exception {
-    tmpPath = HdfsUtils.joinPathFragments(TMP_PATH_BASE, context.getSuite().getName());
-
-    LOG.debug("Temporary Directory: " + getTemporaryPath());
-    FileUtils.deleteDirectory(new File(getTemporaryPath()));
-
-    startHadoop();
-    startSqoop();
-  }
-
-  @AfterSuite(alwaysRun = true)
-  public void tearDownSuite() throws Exception {
-    stopSqoop();
-    stopHadoop();
-  }
-
-  protected void startHadoop() throws Exception {
-    // Start Hadoop Clusters
-    hadoopCluster = HadoopRunnerFactory.getHadoopCluster(System.getProperties(), HadoopMiniClusterRunner.class);
-    hadoopCluster.setTemporaryPath(getTemporaryPath());
-    hadoopCluster.setConfiguration(hadoopCluster.prepareConfiguration(new JobConf()));
-    hadoopCluster.start();
-
-    // Initialize Hdfs Client
-    hdfsClient = FileSystem.get(hadoopCluster.getConfiguration());
-    LOG.debug("HDFS Client: " + hdfsClient);
-  }
-
-  protected void startSqoop() throws Exception {
-    // Start server
-    cluster = createSqoopMiniCluster();
-    cluster.start();
-
-    // Initialize Sqoop Client API
-    client = new SqoopClient(getServerUrl());
-  }
-
-  protected void stopSqoop() throws Exception {
-    cluster.stop();
-  }
-
-  protected void stopHadoop() throws Exception {
-    hadoopCluster.stop();
-  }
-
-  /**
-   * Create Sqoop MiniCluster instance that should be used for this test.
-   *
-   * @return New instance of test mini cluster
-   */
-  public TomcatSqoopMiniCluster createSqoopMiniCluster() throws Exception {
-    return new TomcatSqoopMiniCluster(getSqoopMiniClusterTemporaryPath(), hadoopCluster.getConfiguration());
-  }
-
-  /**
-   * Return SqoopClient configured to talk to testing server.
-   *
-   * @return
-   */
-  public static SqoopClient getClient() {
-    return client;
-  }
-
-  public static void setClient(SqoopClient sqoopClient) {
-    client = sqoopClient;
-  }
-
-  public static SqoopMiniCluster getCluster() {
-    return cluster;
-  }
-
-  public static void setCluster(TomcatSqoopMiniCluster sqoopMiniClusterluster) {
-    cluster = sqoopMiniClusterluster;
-  }
-
-  public static String getTemporaryPath() {
-    return tmpPath;
-  }
-
-  public static String getSqoopMiniClusterTemporaryPath() {
-    return HdfsUtils.joinPathFragments(getTemporaryPath(), "sqoop-mini-cluster");
-  }
-
-  /**
-   * Return testing server URL
-   *
-   * @return
-   */
-  public static String getServerUrl() {
-    return cluster.getServerUrl();
-  }
-
-  /**
-   * Return mapreduce base directory.
-   *
-   * @return
-   */
-  public String getMapreduceDirectory() {
-    return HdfsUtils.joinPathFragments(hadoopCluster.getTestDirectory(), getClass().getName(), getTestName());
-  }
-
-  /**
-   * Assert that execution has generated following lines.
-   *
-   * As the lines can be spread between multiple files the ordering do not make
-   * a difference.
-   *
-   * @param lines
-   * @throws IOException
-   */
-  protected void assertTo(String... lines) throws IOException {
-    // TODO(VB): fix this to be not directly dependent on hdfs/MR
-    HdfsAsserts.assertMapreduceOutput(hdfsClient, getMapreduceDirectory(), lines);
-  }
-
-  /**
-   * Verify number of TO files.
-   *
-   * @param expectedFiles Expected number of files
-   */
-  protected void assertToFiles(int expectedFiles) throws IOException {
-    // TODO(VB): fix this to be not directly dependent on hdfs/MR
-    HdfsAsserts.assertMapreduceOutputFiles(hdfsClient, getMapreduceDirectory(), expectedFiles);
-  }
-
-  /**
-   * Create FROM file with specified content.
-   *
-   * @param filename Input file name
-   * @param lines Individual lines that should be written into the file
-   * @throws IOException
-   */
-  protected void createFromFile(String filename, String...lines) throws IOException {
-    HdfsUtils.createFile(hdfsClient, HdfsUtils.joinPathFragments(getMapreduceDirectory(), filename), lines);
-  }
-}

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/DerbyRepositoryUpgradeTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/DerbyRepositoryUpgradeTest.java b/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/DerbyRepositoryUpgradeTest.java
index 1d5b692..98e1fa1 100644
--- a/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/DerbyRepositoryUpgradeTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/repository/derby/upgrade/DerbyRepositoryUpgradeTest.java
@@ -19,8 +19,8 @@ package org.apache.sqoop.integration.repository.derby.upgrade;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.sqoop.client.SqoopClient;
-import org.apache.sqoop.test.minicluster.TomcatSqoopMiniCluster;
-import org.apache.sqoop.test.testcases.TomcatTestCase;
+import org.apache.sqoop.test.minicluster.JettySqoopMiniCluster;
+import org.apache.sqoop.test.testcases.JettyTestCase;
 import org.apache.sqoop.test.utils.CompressionUtils;
 import org.apache.sqoop.test.utils.HdfsUtils;
 import org.testng.ITestContext;
@@ -51,14 +51,14 @@ import static org.testng.Assert.assertNotNull;
  * methods describing content of the repository (what links/jobs it have, ...).
  *
  */
-public abstract class DerbyRepositoryUpgradeTest extends TomcatTestCase {
+public abstract class DerbyRepositoryUpgradeTest extends JettyTestCase {
 
   private static final Logger LOG = Logger.getLogger(DerbyRepositoryUpgradeTest.class);
 
   /**
    * Custom Sqoop mini cluster that points derby repository to real on-disk structures.
    */
-  public static class DerbySqoopMiniCluster extends TomcatSqoopMiniCluster {
+  public static class DerbySqoopMiniCluster extends JettySqoopMiniCluster {
     private String repositoryPath;
 
     public DerbySqoopMiniCluster(String repositoryPath, String temporaryPath, Configuration configuration) throws Exception {
@@ -66,6 +66,7 @@ public abstract class DerbyRepositoryUpgradeTest extends TomcatTestCase {
       this.repositoryPath = repositoryPath;
     }
 
+    @Override
     protected Map<String, String> getRepositoryConfiguration() {
       Map<String, String> properties = new HashMap<String, String>();
 
@@ -124,10 +125,10 @@ public abstract class DerbyRepositoryUpgradeTest extends TomcatTestCase {
   public abstract Integer[] getDeleteJobIds();
 
   public String getRepositoryPath() {
-    return HdfsUtils.joinPathFragments(getTemporaryTomcatPath(), "repo");
+    return HdfsUtils.joinPathFragments(getTemporaryJettyPath(), "repo");
   }
 
-  public String getTemporaryTomcatPath() {
+  public String getTemporaryJettyPath() {
     return HdfsUtils.joinPathFragments(getTemporaryPath(), getClass().getCanonicalName(), getTestName());
   }
 
@@ -149,7 +150,7 @@ public abstract class DerbyRepositoryUpgradeTest extends TomcatTestCase {
     CompressionUtils.untarStreamToDirectory(tarballStream, getRepositoryPath());
 
     // And use them for new Derby repo instance
-    setCluster(new DerbySqoopMiniCluster(getRepositoryPath(), getTemporaryTomcatPath() + "/sqoop-mini-cluster", hadoopCluster.getConfiguration()));
+    setCluster(new DerbySqoopMiniCluster(getRepositoryPath(), getTemporaryJettyPath() + "/sqoop-mini-cluster", hadoopCluster.getConfiguration()));
 
     // Start server
     getCluster().start();
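
For context, the overridden getRepositoryConfiguration() above is what points
the Derby repository at the untarred on-disk database instead of the default
in-memory one. A rough sketch of its body follows; the property key and value
are assumptions for illustration, since the full method body is not shown in
this hunk:

    // Hypothetical sketch only; the property key/value are assumed, not taken
    // from this patch.
    @Override
    protected Map<String, String> getRepositoryConfiguration() {
      Map<String, String> properties = new HashMap<String, String>();
      // Point the Derby repository at the database files extracted from the
      // versioned tarball, so the upgrade runs against real on-disk structures.
      properties.put("org.apache.sqoop.repository.jdbc.url",
          "jdbc:derby:" + repositoryPath);
      return properties;
    }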

http://git-wip-us.apache.org/repos/asf/sqoop/blob/c8780d0f/tools/pom.xml
----------------------------------------------------------------------
diff --git a/tools/pom.xml b/tools/pom.xml
index 65fd40c..0eb56dc 100644
--- a/tools/pom.xml
+++ b/tools/pom.xml
@@ -47,5 +47,33 @@ limitations under the License.
       <artifactId>commons-io</artifactId>
     </dependency>
   </dependencies>
-
+  <profiles>
+    <profile>
+      <id>binary</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-dependency-plugin</artifactId>
+            <executions>
+              <execution>
+                <phase>package</phase>
+                <goals>
+                  <goal>copy-dependencies</goal>
+                </goals>
+                <configuration>
+                  <includeScope>runtime</includeScope>
+                  <excludeScope>test</excludeScope>
+                  <outputDirectory>${project.build.directory}/lib</outputDirectory>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
 </project>
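
As a usage note, the new off-by-default binary profile stages the module's
runtime dependencies (test scope excluded) into ${project.build.directory}/lib
during the package phase via the maven-dependency-plugin, e.g. when building
with mvn package -Pbinary; presumably this is what allows the binary
distribution to bundle the tools jars together with their dependencies.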