Posted to commits@linkis.apache.org by da...@apache.org on 2022/05/16 06:10:37 UTC

[incubator-linkis] branch dev-1.1.2 updated: Add Sqoop Engine (#2109)

This is an automated email from the ASF dual-hosted git repository.

davidhua pushed a commit to branch dev-1.1.2
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git


The following commit(s) were added to refs/heads/dev-1.1.2 by this push:
     new ef3abb78f Add Sqoop Engine (#2109)
ef3abb78f is described below

commit ef3abb78f6184cdeb3033a415c21db9a0d8bdd97
Author: xiaojie19852006 <32...@users.noreply.github.com>
AuthorDate: Mon May 16 14:10:30 2022 +0800

    Add Sqoop Engine (#2109)
    
    * add sqoop engine
    
    * add sqoop license text
---
 LICENSE-binary                                     |   1 +
 NOTICE-binary                                      |  17 +-
 licenses-binary/LICENSE-sqoop.txt                  | 352 +++++++++++++
 .../engineconn-plugins/sqoop/pom.xml               | 285 +++++++++++
 .../sqoop/src/main/assembly/distribution.xml       | 324 ++++++++++++
 .../sqoop/client/LinkisSqoopClient.java            | 229 +++++++++
 .../engineconnplugin/sqoop/client/Sqoop.java       | 551 +++++++++++++++++++++
 .../sqoop/client/config/ExecutionContext.java      |  28 ++
 .../sqoop/client/config/ParamsMapping.java         | 144 ++++++
 .../client/exception/JobClosableException.java     |  36 ++
 .../client/exception/JobExecutionException.java    |  35 ++
 .../sqoop/client/utils/JarLoader.java              | 170 +++++++
 .../java/org/apache/sqoop/mapreduce/JobBase.java   | 410 +++++++++++++++
 .../main/resources/linkis-engineconn.properties    |  22 +
 .../sqoop/src/main/resources/log4j2.xml            |  82 +++
 .../sqoop/SqoopEngineConnPlugin.scala              |  66 +++
 .../sqoop/context/SqoopEngineConnContext.scala     |  29 ++
 .../sqoop/context/SqoopEnvConfiguration.scala      |  39 ++
 .../sqoop/context/SqoopParamsConfiguration.scala   |  42 ++
 .../sqoop/context/SqoopResourceConfiguration.scala |  30 ++
 .../sqoop/executor/SqoopExecutor.scala             |  50 ++
 .../sqoop/executor/SqoopOnceCodeExecutor.scala     | 142 ++++++
 .../sqoop/executor/SqoopOnceExecutor.scala         |  67 +++
 .../sqoop/factory/SqoopEngineConnFactory.scala     |  45 ++
 .../sqoop/factory/SqoopExecutorFactory.scala       |  43 ++
 .../launch/SqoopEngineConnLaunchBuilder.scala      |  92 ++++
 .../sqoop/params/SqoopParamsResolver.scala         |  34 ++
 .../resource/SqoopEngineConnResourceFactory.scala  |  31 ++
 .../engineconnplugin/sqoop/util/ClassUtil.scala    |  40 ++
 linkis-engineconn-plugins/pom.xml                  |   1 +
 tool/dependencies/known-dependencies.txt           |   1 +
 31 files changed, 3437 insertions(+), 1 deletion(-)
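
For orientation: the engine's native entry point is LinkisSqoopClient.run(Map<String, String> params) (shown further below), which hands a flat key/value map to a reflectively loaded Sqoop driver; the new ParamsMapping.java translates those keys into Sqoop command-line options. A minimal sketch of calling it directly follows; the "sqoop.mode"/"sqoop.args.*" key names are assumptions inferred from the ParamsMapping translation table, not a confirmed contract:

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.linkis.engineconnplugin.sqoop.client.LinkisSqoopClient;

    public class SqoopRunSketch {
        public static void main(String[] args) {
            Map<String, String> params = new HashMap<>();
            // Hypothetical keys, assumed from the ParamsMapping translation table
            params.put("sqoop.mode", "import");
            params.put("sqoop.args.connect", "jdbc:mysql://127.0.0.1:3306/test");
            params.put("sqoop.args.username", "user");
            params.put("sqoop.args.password", "pass");
            params.put("sqoop.args.table", "employee");
            // run(...) blocks until the job finishes; it returns -1 on any
            // failure (see the catch block in LinkisSqoopClient.run below).
            int exitCode = LinkisSqoopClient.run(params);
            System.out.println("Sqoop job finished with exit code " + exitCode);
        }
    }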

diff --git a/LICENSE-binary b/LICENSE-binary
index 1ab20f914..cc1f935a7 100644
--- a/LICENSE-binary
+++ b/LICENSE-binary
@@ -576,6 +576,7 @@ See licenses-binary/ for text of these licenses.
     (Apache License, version 2.0) Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.4.6 - https://zookeeper.apache.org/)
     (Apache License, version 2.0) Apache twill (org.apache.twill:* - https://twill.apache.org/)
     (Apache License, version 2.0) Apache tephra (co.cask.tephra:* - https://tephra.apache.org/)
+    (Apache License, version 2.0) Apache Sqoop - Server (org.apache.sqoop:sqoop:1.4.6 - https://github.com/apache/sqoop/)
 
 ========================================================================
 Third party CDDL licenses
diff --git a/NOTICE-binary b/NOTICE-binary
index a9edaf741..a713baacd 100644
--- a/NOTICE-binary
+++ b/NOTICE-binary
@@ -3447,4 +3447,19 @@ Apache Twill
 Copyright 2013-2017 The Apache Software Foundation
 
 This product includes software developed at
-The Apache Software Foundation (http://www.apache.org/).
\ No newline at end of file
+The Apache Software Foundation (http://www.apache.org/).
+
+   -------------------------------------- Sqoop NOTICE --------------------------------------
+   Apache Sqoop
+   Copyright 2011-2013 The Apache Software Foundation
+
+   This product includes software developed at
+   The Apache Software Foundation (http://www.apache.org/).
+
+   Portions of this software were developed at
+   Cloudera, Inc. (http://www.cloudera.com/).
+
+   Patch review tool is based on the Kafka patch review tool.
+   Which is part of the Apache Kafka project, an open source software
+   project with copyright by The Apache Software Foundation.
+   The original source code is available at: http://kafka.apache.org
\ No newline at end of file
diff --git a/licenses-binary/LICENSE-sqoop.txt b/licenses-binary/LICENSE-sqoop.txt
new file mode 100644
index 000000000..d6562216b
--- /dev/null
+++ b/licenses-binary/LICENSE-sqoop.txt
@@ -0,0 +1,352 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+===============================================================================
+
+This Apache Sqoop distribution includes the following sources/binaries.
+The use of these sources/binaries is subject to the terms and conditions
+of their respective licenses.
+
+For src/docs/web/docbook.css:
+
+  The BSD License
+
+  Copyright (c) 2001, 2003 The FreeBSD Documentation Project
+  All rights reserved.
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions
+  are met:
+  1. Redistributions of source code must retain the above copyright
+     notice, this list of conditions and the following disclaimer.
+  2. Redistributions in binary form must reproduce the above copyright
+     notice, this list of conditions and the following disclaimer in the
+     documentation and/or other materials provided with the distribution.
+
+  THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+  ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+  IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+  ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+  FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+  DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+  OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+  HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+  OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+  SUCH DAMAGE.
+
+For lib/ant-contrib-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/ant-eclipse-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/avro-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/avro-mapred-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/commons-codec-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/commons-compress-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/commons-io-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/commons-jexl-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/commons-logging-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/hsqldb-<version>.jar:
+
+  The BSD License
+
+For lib/ivy-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/jackson-annotations-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/jackson-core-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/jackson-core-asl-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/jackson-databind-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/jackson-mapper-asl-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/opencsv-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/paranamer-<version>.jar:
+
+  The BSD License
+
+For lib/parquet-avro-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/parquet-column-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/parquet-common-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/parquet-encoding-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/parquet-format-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/parquet-generator-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/parquet-hadoop-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/parquet-jackson-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/slf4j-api-<version>.jar:
+
+  The Apache License, Version 2.0
+
+For lib/snappy-java-<version>.jar:
+
+  The Apache License, Version 2.0
+
+Some parts of the code were copied from the Apache Hive Project:
+
+  The Apache License, Version 2.0
\ No newline at end of file
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/pom.xml b/linkis-engineconn-plugins/engineconn-plugins/sqoop/pom.xml
new file mode 100644
index 000000000..d1724cd3c
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/pom.xml
@@ -0,0 +1,285 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>linkis</artifactId>
+        <groupId>org.apache.linkis</groupId>
+        <version>1.1.1</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>linkis-engineplugin-sqoop</artifactId>
+    <properties>
+        <sqoop.version>1.4.6</sqoop.version>
+        <hive.version>3.1.2</hive.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-exec</artifactId>
+            <scope>provided</scope>
+            <version>1.3</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sqoop</groupId>
+            <artifactId>sqoop</artifactId>
+            <classifier>hadoop200</classifier>
+            <version>${sqoop.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.avro</groupId>
+            <artifactId>avro</artifactId>
+            <scope>provided</scope>
+            <version>1.10.2</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-common</artifactId>
+            <scope>provided</scope>
+            <version>${hive.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>*</groupId>
+                    <artifactId>*</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-mapreduce-client-core</artifactId>
+            <version>${hadoop.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-once-engineconn</artifactId>
+            <version>${linkis.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>commons-logging</artifactId>
+                    <groupId>commons-logging</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-computation-engineconn</artifactId>
+            <version>${linkis.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-client</artifactId>
+            <version>${hadoop.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.mortbay.jetty</groupId>
+                    <artifactId>jetty</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.mortbay.jetty</groupId>
+                    <artifactId>jetty-util</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.sun.jersey</groupId>
+                    <artifactId>jersey-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.sun.jersey</groupId>
+                    <artifactId>jersey-server</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.sun.jersey</groupId>
+                    <artifactId>jersey-json</artifactId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>jsr311-api</artifactId>
+                    <groupId>javax.ws.rs</groupId>
+                </exclusion>
+                <exclusion>
+                    <groupId>net.java.dev.jets3t</groupId>
+                    <artifactId>jets3t</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.jcraft</groupId>
+                    <artifactId>jsch</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.google.code.findbugs</groupId>
+                    <artifactId>jsr305</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>xmlenc</groupId>
+                    <artifactId>xmlenc</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>net.java.dev.jets3t</groupId>
+                    <artifactId>jets3t</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.avro</groupId>
+                    <artifactId>avro</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-auth</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.jcraft</groupId>
+                    <artifactId>jsch</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.google.code.findbugs</groupId>
+                    <artifactId>jsr305</artifactId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>hadoop-hdfs</artifactId>
+                    <groupId>org.apache.hadoop</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-engineconn-plugin-core</artifactId>
+            <version>${linkis.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-rpc</artifactId>
+            <version>${linkis.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-storage</artifactId>
+            <version>${linkis.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-common</artifactId>
+            <version>${linkis.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-bml-engine-hook</artifactId>
+            <version>${linkis.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>commons-logging</artifactId>
+                    <groupId>commons-logging</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-deploy-plugin</artifactId>
+            </plugin>
+
+            <plugin>
+                <groupId>net.alchim31.maven</groupId>
+                <artifactId>scala-maven-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>2.3</version>
+                <inherited>false</inherited>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                        <configuration>
+                            <descriptors>
+                                <descriptor>src/main/assembly/distribution.xml</descriptor>
+                            </descriptors>
+                        </configuration>
+                    </execution>
+                </executions>
+                <configuration>
+                    <skipAssembly>false</skipAssembly>
+                    <finalName>out</finalName>
+                    <appendAssemblyId>false</appendAssemblyId>
+                    <attach>false</attach>
+                    <descriptors>
+                        <descriptor>src/main/assembly/distribution.xml</descriptor>
+                    </descriptors>
+                </configuration>
+            </plugin>
+        </plugins>
+        <resources>
+            <resource>
+                <directory>src/main/java</directory>
+                <includes>
+                    <include>**/*.xml</include>
+                    <include>**/*.properties</include>
+                </includes>
+            </resource>
+            <resource>
+                <directory>src/main/resources</directory>
+                <excludes>
+                    <exclude>**/application.yml</exclude>
+                    <exclude>**/bootstrap.yml</exclude>
+                </excludes>
+            </resource>
+        </resources>
+    </build>
+</project>
\ No newline at end of file
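
The module builds like the other engine plugins; from the repository root, something like mvn clean package -pl linkis-engineconn-plugins/engineconn-plugins/sqoop -am should work, with the assembly descriptor that follows laying the result out under target/out/sqoop (finalName "out", appendAssemblyId false, base directory "sqoop"). The exact invocation is an assumption; profiles or a full-tree build may be required.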
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/assembly/distribution.xml
new file mode 100644
index 000000000..78f54c4c0
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/assembly/distribution.xml
@@ -0,0 +1,324 @@
+<!--
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<assembly
+        xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/2.3"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/2.3 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+    <id>sqoop</id>
+    <formats>
+        <format>dir</format>
+    </formats>
+    <includeBaseDirectory>true</includeBaseDirectory>
+    <baseDirectory>sqoop</baseDirectory>
+
+    <dependencySets>
+        <dependencySet>
+            <!-- Enable access to all projects in the current multimodule build! <useAllReactorProjects>true</useAllReactorProjects> -->
+            <!-- Now, select which projects to include in this module-set. -->
+            <outputDirectory>/dist/v${sqoop.version}/lib</outputDirectory>
+            <useProjectArtifact>true</useProjectArtifact>
+            <useTransitiveDependencies>true</useTransitiveDependencies>
+            <unpack>false</unpack>
+            <useStrictFiltering>false</useStrictFiltering>
+            <useTransitiveFiltering>true</useTransitiveFiltering>
+
+            <excludes>
+                <exclude>antlr:antlr:jar</exclude>
+                <exclude>aopalliance:aopalliance:jar</exclude>
+                <exclude>asm:asm:jar</exclude>
+                <exclude>cglib:cglib:jar</exclude>
+                <exclude>com.amazonaws:aws-java-sdk-autoscaling:jar</exclude>
+                <exclude>com.amazonaws:aws-java-sdk-core:jar</exclude>
+                <exclude>com.amazonaws:aws-java-sdk-ec2:jar</exclude>
+                <exclude>com.amazonaws:aws-java-sdk-route53:jar</exclude>
+                <exclude>com.amazonaws:aws-java-sdk-sts:jar</exclude>
+                <exclude>com.amazonaws:jmespath-java:jar</exclude>
+                <exclude>com.fasterxml.jackson.core:jackson-annotations:jar</exclude>
+                <exclude>com.fasterxml.jackson.core:jackson-core:jar</exclude>
+                <exclude>com.fasterxml.jackson.core:jackson-databind:jar</exclude>
+                <exclude>com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:jar</exclude>
+                <exclude>com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar</exclude>
+                <exclude>com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar</exclude>
+                <exclude>com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:jar</exclude>
+                <exclude>com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:jar</exclude>
+                <exclude>com.fasterxml.jackson.module:jackson-module-jaxb-annotations:jar</exclude>
+                <exclude>com.fasterxml.jackson.module:jackson-module-parameter-names:jar</exclude>
+                <exclude>com.fasterxml.jackson.module:jackson-module-paranamer:jar</exclude>
+                <exclude>com.fasterxml.jackson.module:jackson-module-scala_2.11:jar</exclude>
+                <exclude>com.github.andrewoma.dexx:dexx-collections:jar</exclude>
+                <exclude>com.github.vlsi.compactmap:compactmap:jar</exclude>
+                <exclude>com.google.code.findbugs:annotations:jar</exclude>
+                <exclude>com.google.code.findbugs:jsr305:jar</exclude>
+                <exclude>com.google.code.gson:gson:jar</exclude>
+                <exclude>com.google.guava:guava:jar</exclude>
+                <exclude>com.google.inject:guice:jar</exclude>
+                <exclude>com.google.protobuf:protobuf-java:jar</exclude>
+                <exclude>com.netflix.archaius:archaius-core:jar</exclude>
+                <exclude>com.netflix.eureka:eureka-client:jar</exclude>
+                <exclude>com.netflix.eureka:eureka-core:jar</exclude>
+                <exclude>com.netflix.hystrix:hystrix-core:jar</exclude>
+                <exclude>com.netflix.netflix-commons:netflix-commons-util:jar</exclude>
+                <exclude>com.netflix.netflix-commons:netflix-eventbus:jar</exclude>
+                <exclude>com.netflix.netflix-commons:netflix-infix:jar</exclude>
+                <exclude>com.netflix.netflix-commons:netflix-statistics:jar</exclude>
+                <exclude>com.netflix.ribbon:ribbon:jar</exclude>
+                <exclude>com.netflix.ribbon:ribbon-core:jar</exclude>
+                <exclude>com.netflix.ribbon:ribbon-eureka:jar</exclude>
+                <exclude>com.netflix.ribbon:ribbon-httpclient:jar</exclude>
+                <exclude>com.netflix.ribbon:ribbon-loadbalancer:jar</exclude>
+                <exclude>com.netflix.ribbon:ribbon-transport:jar</exclude>
+                <exclude>com.netflix.servo:servo-core:jar</exclude>
+                <exclude>com.ning:async-http-client:jar</exclude>
+                <exclude>com.sun.jersey.contribs:jersey-apache-client4:jar</exclude>
+                <exclude>com.sun.jersey:jersey-client:jar</exclude>
+                <exclude>com.sun.jersey:jersey-core:jar</exclude>
+                <exclude>com.sun.jersey:jersey-json:jar</exclude>
+                <exclude>com.sun.jersey:jersey-server:jar</exclude>
+                <exclude>com.sun.jersey:jersey-servlet:jar</exclude>
+                <exclude>com.sun.xml.bind:jaxb-impl:jar</exclude>
+                <exclude>com.thoughtworks.paranamer:paranamer:jar</exclude>
+                <exclude>com.thoughtworks.xstream:xstream:jar</exclude>
+                <exclude>org.apache.linkis:linkis-common:jar</exclude>
+                <exclude>org.apache.linkis:linkis-module:jar</exclude>
+                <exclude>commons-beanutils:commons-beanutils:jar</exclude>
+                <exclude>commons-beanutils:commons-beanutils-core:jar</exclude>
+                <exclude>commons-cli:commons-cli:jar</exclude>
+                <exclude>commons-codec:commons-codec:jar</exclude>
+                <exclude>commons-collections:commons-collections:jar</exclude>
+                <exclude>commons-configuration:commons-configuration:jar</exclude>
+                <exclude>commons-daemon:commons-daemon:jar</exclude>
+                <exclude>commons-dbcp:commons-dbcp:jar</exclude>
+                <exclude>commons-digester:commons-digester:jar</exclude>
+                <exclude>commons-httpclient:commons-httpclient:jar</exclude>
+                <exclude>commons-io:commons-io:jar</exclude>
+                <exclude>commons-jxpath:commons-jxpath:jar</exclude>
+                <exclude>commons-lang:commons-lang:jar</exclude>
+                <exclude>commons-logging:commons-logging:jar</exclude>
+                <exclude>commons-net:commons-net:jar</exclude>
+                <exclude>commons-pool:commons-pool:jar</exclude>
+                <exclude>io.micrometer:micrometer-core:jar</exclude>
+                <exclude>io.netty:netty:jar</exclude>
+                <exclude>io.netty:netty-all:jar</exclude>
+                <exclude>io.netty:netty-buffer:jar</exclude>
+                <exclude>io.netty:netty-codec:jar</exclude>
+                <exclude>io.netty:netty-codec-http:jar</exclude>
+                <exclude>io.netty:netty-common:jar</exclude>
+                <exclude>io.netty:netty-handler:jar</exclude>
+                <exclude>io.netty:netty-transport:jar</exclude>
+                <exclude>io.netty:netty-transport-native-epoll:jar</exclude>
+                <exclude>io.reactivex:rxjava:jar</exclude>
+                <exclude>io.reactivex:rxnetty:jar</exclude>
+                <exclude>io.reactivex:rxnetty-contexts:jar</exclude>
+                <exclude>io.reactivex:rxnetty-servo:jar</exclude>
+                <exclude>javax.activation:activation:jar</exclude>
+                <exclude>javax.annotation:javax.annotation-api:jar</exclude>
+                <exclude>javax.inject:javax.inject:jar</exclude>
+                <exclude>javax.servlet:javax.servlet-api:jar</exclude>
+                <exclude>javax.servlet.jsp:jsp-api:jar</exclude>
+                <exclude>javax.validation:validation-api:jar</exclude>
+                <exclude>javax.websocket:javax.websocket-api:jar</exclude>
+                <exclude>javax.ws.rs:javax.ws.rs-api:jar</exclude>
+                <exclude>javax.xml.bind:jaxb-api:jar</exclude>
+                <exclude>javax.xml.stream:stax-api:jar</exclude>
+                <exclude>joda-time:joda-time:jar</exclude>
+                <exclude>log4j:log4j:jar</exclude>
+                <exclude>mysql:mysql-connector-java:jar</exclude>
+                <exclude>net.databinder.dispatch:dispatch-core_2.11:jar</exclude>
+                <exclude>net.databinder.dispatch:dispatch-json4s-jackson_2.11:jar</exclude>
+                <exclude>org.antlr:antlr-runtime:jar</exclude>
+                <exclude>org.antlr:stringtemplate:jar</exclude>
+                <exclude>org.apache.commons:commons-compress:jar</exclude>
+                <exclude>org.apache.commons:commons-math:jar</exclude>
+                <exclude>org.apache.commons:commons-math3:jar</exclude>
+                <exclude>org.apache.curator:curator-client:jar</exclude>
+                <exclude>org.apache.curator:curator-framework:jar</exclude>
+                <exclude>org.apache.curator:curator-recipes:jar</exclude>
+                <exclude>org.apache.directory.api:api-asn1-api:jar</exclude>
+                <exclude>org.apache.directory.api:api-util:jar</exclude>
+                <exclude>org.apache.directory.server:apacheds-i18n:jar</exclude>
+                <exclude>org.apache.directory.server:apacheds-kerberos-codec:jar</exclude>
+                <exclude>org.apache.hadoop:hadoop-annotations:jar</exclude>
+                <exclude>org.apache.hadoop:hadoop-auth:jar</exclude>
+                <exclude>org.apache.hadoop:hadoop-common:jar</exclude>
+                <exclude>org.apache.hadoop:hadoop-hdfs:jar</exclude>
+                <exclude>org.apache.htrace:htrace-core:jar</exclude>
+                <exclude>org.apache.httpcomponents:httpclient:jar</exclude>
+                <exclude>org.apache.httpcomponents:httpcore:jar</exclude>
+                <exclude>org.apache.logging.log4j:log4j-api:jar</exclude>
+                <exclude>org.apache.logging.log4j:log4j-core:jar</exclude>
+                <exclude>org.apache.logging.log4j:log4j-jul:jar</exclude>
+                <exclude>org.apache.logging.log4j:log4j-slf4j-impl:jar</exclude>
+                <exclude>org.apache.zookeeper:zookeeper:jar</exclude>
+                <exclude>org.aspectj:aspectjweaver:jar</exclude>
+                <exclude>org.bouncycastle:bcpkix-jdk15on:jar</exclude>
+                <exclude>org.bouncycastle:bcprov-jdk15on:jar</exclude>
+                <exclude>org.codehaus.jackson:jackson-jaxrs:jar</exclude>
+                <exclude>org.codehaus.jackson:jackson-xc:jar</exclude>
+                <exclude>org.codehaus.jettison:jettison:jar</exclude>
+                <exclude>org.codehaus.woodstox:stax2-api:jar</exclude>
+                <exclude>org.codehaus.woodstox:woodstox-core-asl:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-annotations:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-client:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-continuation:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-http:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-io:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-jndi:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-plus:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-security:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-server:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-servlet:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-servlets:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-util:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-webapp:jar</exclude>
+                <exclude>org.eclipse.jetty:jetty-xml:jar</exclude>
+                <exclude>org.eclipse.jetty.websocket:javax-websocket-client-impl:jar</exclude>
+                <exclude>org.eclipse.jetty.websocket:javax-websocket-server-impl:jar</exclude>
+                <exclude>org.eclipse.jetty.websocket:websocket-api:jar</exclude>
+                <exclude>org.eclipse.jetty.websocket:websocket-client:jar</exclude>
+                <exclude>org.eclipse.jetty.websocket:websocket-common:jar</exclude>
+                <exclude>org.eclipse.jetty.websocket:websocket-server:jar</exclude>
+                <exclude>org.eclipse.jetty.websocket:websocket-servlet:jar</exclude>
+                <exclude>org.fusesource.leveldbjni:leveldbjni-all:jar</exclude>
+                <exclude>org.glassfish.hk2:class-model:jar</exclude>
+                <exclude>org.glassfish.hk2:config-types:jar</exclude>
+                <exclude>org.glassfish.hk2.external:aopalliance-repackaged:jar</exclude>
+                <exclude>org.glassfish.hk2.external:asm-all-repackaged:jar</exclude>
+                <exclude>org.glassfish.hk2.external:bean-validator:jar</exclude>
+                <exclude>org.glassfish.hk2.external:javax.inject:jar</exclude>
+                <exclude>org.glassfish.hk2:hk2:jar</exclude>
+                <exclude>org.glassfish.hk2:hk2-api:jar</exclude>
+                <exclude>org.glassfish.hk2:hk2-config:jar</exclude>
+                <exclude>org.glassfish.hk2:hk2-core:jar</exclude>
+                <exclude>org.glassfish.hk2:hk2-locator:jar</exclude>
+                <exclude>org.glassfish.hk2:hk2-runlevel:jar</exclude>
+                <exclude>org.glassfish.hk2:hk2-utils:jar</exclude>
+                <exclude>org.glassfish.hk2:osgi-resource-locator:jar</exclude>
+                <exclude>org.glassfish.hk2:spring-bridge:jar</exclude>
+                <exclude>org.glassfish.jersey.bundles:jaxrs-ri:jar</exclude>
+                <exclude>org.glassfish.jersey.bundles.repackaged:jersey-guava:jar</exclude>
+                <exclude>org.glassfish.jersey.containers:jersey-container-servlet:jar</exclude>
+                <exclude>org.glassfish.jersey.containers:jersey-container-servlet-core:jar</exclude>
+                <exclude>org.glassfish.jersey.core:jersey-client:jar</exclude>
+                <exclude>org.glassfish.jersey.core:jersey-common:jar</exclude>
+                <exclude>org.glassfish.jersey.core:jersey-server:jar</exclude>
+                <exclude>org.glassfish.jersey.ext:jersey-entity-filtering:jar</exclude>
+                <exclude>org.glassfish.jersey.ext:jersey-spring3:jar</exclude>
+                <exclude>org.glassfish.jersey.media:jersey-media-jaxb:jar</exclude>
+                <exclude>org.glassfish.jersey.media:jersey-media-json-jackson:jar</exclude>
+                <exclude>org.glassfish.jersey.media:jersey-media-multipart:jar</exclude>
+                <exclude>org.hdrhistogram:HdrHistogram:jar</exclude>
+                <exclude>org.javassist:javassist:jar</exclude>
+                <exclude>org.json4s:json4s-ast_2.11:jar</exclude>
+                <exclude>org.json4s:json4s-core_2.11:jar</exclude>
+                <exclude>org.json4s:json4s-jackson_2.11:jar</exclude>
+                <exclude>org.jsoup:jsoup:jar</exclude>
+                <exclude>org.jvnet.mimepull:mimepull:jar</exclude>
+                <exclude>org.jvnet:tiger-types:jar</exclude>
+                <exclude>org.latencyutils:LatencyUtils:jar</exclude>
+                <exclude>org.mortbay.jasper:apache-el:jar</exclude>
+                <exclude>org.mortbay.jetty:jetty:jar</exclude>
+                <exclude>org.mortbay.jetty:jetty-util:jar</exclude>
+                <exclude>org.ow2.asm:asm-analysis:jar</exclude>
+                <exclude>org.ow2.asm:asm-commons:jar</exclude>
+                <exclude>org.ow2.asm:asm-tree:jar</exclude>
+                <exclude>org.reflections:reflections:jar</exclude>
+                <exclude>org.scala-lang.modules:scala-parser-combinators_2.11:jar</exclude>
+                <exclude>org.scala-lang.modules:scala-xml_2.11:jar</exclude>
+                <exclude>org.scala-lang:scala-compiler:jar</exclude>
+                <exclude>org.scala-lang:scala-library:jar</exclude>
+                <exclude>org.scala-lang:scala-reflect:jar</exclude>
+                <exclude>org.scala-lang:scalap:jar</exclude>
+                <exclude>org.slf4j:jul-to-slf4j:jar</exclude>
+                <exclude>org.slf4j:slf4j-api:jar</exclude>
+                <exclude>org.springframework.boot:spring-boot:jar</exclude>
+                <exclude>org.springframework.boot:spring-boot-actuator:jar</exclude>
+                <exclude>org.springframework.boot:spring-boot-actuator-autoconfigure:jar</exclude>
+                <exclude>org.springframework.boot:spring-boot-autoconfigure:jar</exclude>
+                <exclude>org.springframework.boot:spring-boot-starter:jar</exclude>
+                <exclude>org.springframework.boot:spring-boot-starter-actuator:jar</exclude>
+                <exclude>org.springframework.boot:spring-boot-starter-aop:jar</exclude>
+                <exclude>org.springframework.boot:spring-boot-starter-jetty:jar</exclude>
+                <exclude>org.springframework.boot:spring-boot-starter-json:jar</exclude>
+                <exclude>org.springframework.boot:spring-boot-starter-log4j2:jar</exclude>
+                <exclude>org.springframework.boot:spring-boot-starter-web:jar</exclude>
+                <exclude>org.springframework.cloud:spring-cloud-commons:jar</exclude>
+                <exclude>org.springframework.cloud:spring-cloud-config-client:jar</exclude>
+                <exclude>org.springframework.cloud:spring-cloud-context:jar</exclude>
+                <exclude>org.springframework.cloud:spring-cloud-netflix-archaius:jar</exclude>
+                <exclude>org.springframework.cloud:spring-cloud-netflix-core:jar</exclude>
+                <exclude>org.springframework.cloud:spring-cloud-netflix-eureka-client:jar</exclude>
+                <exclude>org.springframework.cloud:spring-cloud-netflix-ribbon:jar</exclude>
+                <exclude>org.springframework.cloud:spring-cloud-starter:jar</exclude>
+                <exclude>org.springframework.cloud:spring-cloud-starter-config:jar</exclude>
+                <exclude>org.springframework.cloud:spring-cloud-starter-eureka:jar</exclude>
+                <exclude>org.springframework.cloud:spring-cloud-starter-netflix-archaius:jar</exclude>
+                <exclude>org.springframework.cloud:spring-cloud-starter-netflix-eureka-client:jar</exclude>
+                <exclude>org.springframework.cloud:spring-cloud-starter-netflix-ribbon:jar</exclude>
+                <exclude>org.springframework.security:spring-security-crypto:jar</exclude>
+                <exclude>org.springframework.security:spring-security-rsa:jar</exclude>
+                <exclude>org.springframework:spring-aop:jar</exclude>
+                <exclude>org.springframework:spring-beans:jar</exclude>
+                <exclude>org.springframework:spring-context:jar</exclude>
+                <exclude>org.springframework:spring-core:jar</exclude>
+                <exclude>org.springframework:spring-expression:jar</exclude>
+                <exclude>org.springframework:spring-jcl:jar</exclude>
+                <exclude>org.springframework:spring-web:jar</exclude>
+                <exclude>org.springframework:spring-webmvc:jar</exclude>
+                <exclude>org.tukaani:xz:jar</exclude>
+                <exclude>org.yaml:snakeyaml:jar</exclude>
+                <exclude>software.amazon.ion:ion-java:jar</exclude>
+                <exclude>xerces:xercesImpl:jar</exclude>
+                <exclude>xmlenc:xmlenc:jar</exclude>
+                <exclude>xmlpull:xmlpull:jar</exclude>
+                <exclude>xpp3:xpp3_min:jar</exclude>
+            </excludes>
+        </dependencySet>
+    </dependencySets>
+
+    <fileSets>
+
+        <fileSet>
+            <directory>${basedir}/src/main/resources</directory>
+            <includes>
+                <include>*</include>
+            </includes>
+            <fileMode>0777</fileMode>
+            <directoryMode>0755</directoryMode>
+            <outputDirectory>/dist/v${sqoop.version}/conf</outputDirectory>
+            <lineEnding>unix</lineEnding>
+        </fileSet>
+
+        <fileSet>
+            <directory>${basedir}/target</directory>
+            <includes>
+                <include>*.jar</include>
+            </includes>
+            <excludes>
+                <exclude>*doc.jar</exclude>
+            </excludes>
+            <fileMode>0777</fileMode>
+            <outputDirectory>/plugin/${sqoop.version}</outputDirectory>
+        </fileSet>
+
+    </fileSets>
+
+
+</assembly>
+
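
Taken together, the dependencySet and fileSets above produce a plugin directory of roughly this shape (a sketch derived from the outputDirectory values, with ${sqoop.version} resolved to 1.4.6):

    sqoop/
      dist/
        v1.4.6/
          conf/        # src/main/resources (linkis-engineconn.properties, log4j2.xml)
          lib/         # transitive runtime dependencies minus the excludes above
      plugin/
        1.4.6/         # the linkis-engineplugin-sqoop jar built from target/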
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/LinkisSqoopClient.java b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/LinkisSqoopClient.java
new file mode 100644
index 000000000..9d364be57
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/LinkisSqoopClient.java
@@ -0,0 +1,229 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.client;
+
+import org.apache.linkis.common.exception.ErrorException;
+import org.apache.linkis.engineconnplugin.sqoop.client.utils.JarLoader;
+import org.apache.linkis.protocol.engine.JobProgressInfo;
+
+import org.apache.sqoop.SqoopOptions;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.io.Writer;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.Map;
+import java.util.function.Consumer;
+
+public class LinkisSqoopClient {
+    private static Class<?> sqoopEngineClass;
+    private static Logger logger = LoggerFactory.getLogger(LinkisSqoopClient.class);
+    private static JarLoader jarLoader;
+
+    public static int run(Map<String, String> params) {
+        try {
+            jarLoader =
+                    new JarLoader(
+                            new String[] {
+                                LinkisSqoopClient.class
+                                        .getProtectionDomain()
+                                        .getCodeSource()
+                                        .getLocation()
+                                        .getPath()
+                            });
+            // Load the Sqoop JobBase class redefined by this plugin; note that it is loaded without being resolved
+            jarLoader.loadClass("org.apache.sqoop.mapreduce.JobBase", false);
+            // Add the sqoop-{version}.jar to class path
+            jarLoader.addJarURL(
+                    SqoopOptions.class
+                            .getProtectionDomain()
+                            .getCodeSource()
+                            .getLocation()
+                            .getPath());
+            // Update the context loader
+            Thread.currentThread().setContextClassLoader(jarLoader);
+            sqoopEngineClass =
+                    jarLoader.loadClass("org.apache.linkis.engineconnplugin.sqoop.client.Sqoop");
+            Method method = sqoopEngineClass.getDeclaredMethod("main", Map.class);
+            return (Integer) method.invoke(null, params);
+        } catch (Throwable e) {
+            logger.error("Run Error Message:" + getLog(e), e);
+            return -1;
+        }
+    }
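+
+    /*
+     * Illustrative caller sketch (the parameter keys below are hypothetical; the real
+     * key prefix and mode key come from SqoopParamsConfiguration, see ParamsMapping):
+     *
+     *   Map<String, String> params = new HashMap<>();
+     *   params.put("sqoop.mode", "import");
+     *   params.put("sqoop.args.connect", "jdbc:mysql://host:3306/db");
+     *   params.put("sqoop.args.table", "t_user");
+     *   int exitCode = LinkisSqoopClient.run(params);
+     */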
+
+    /** Close */
+    public static void close() {
+        operateInClassLoader(
+                jarLoader,
+                () -> {
+                    Method method = sqoopEngineClass.getDeclaredMethod("close");
+                    method.invoke(null);
+                    return null;
+                },
+                e -> logger.error("Close Error Message: {}", getLog(e)));
+    }
+
+    /**
+     * Fetch application id
+     *
+     * @return application id
+     */
+    public static String getApplicationId() {
+        return operateInClassLoader(
+                jarLoader,
+                () -> {
+                    Method method = sqoopEngineClass.getDeclaredMethod("getApplicationId");
+                    return (String) method.invoke(null);
+                },
+                e -> logger.error("Linkis SqoopClient getApplicationId: {}", getLog(e)));
+    }
+
+    /**
+     * Fetch application url
+     *
+     * @return url
+     */
+    public static String getApplicationURL() {
+        return operateInClassLoader(
+                jarLoader,
+                () -> {
+                    Method method = sqoopEngineClass.getDeclaredMethod("getApplicationURL");
+                    return (String) method.invoke(null);
+                },
+                e -> logger.error("Linkis SqoopClient getApplicationURL: {}", getLog(e)));
+    }
+
+    /**
+     * Progress value
+     *
+     * @return progress
+     */
+    public static Float progress() {
+        return operateInClassLoader(
+                jarLoader,
+                () -> {
+                    Method method = sqoopEngineClass.getDeclaredMethod("progress");
+                    return (Float) method.invoke(null);
+                },
+                e -> logger.error("Linkis SqoopClient progress: {}", getLog(e)));
+    }
+
+    /**
+     * Progress info
+     *
+     * @return job progress info
+     */
+    @SuppressWarnings("unchecked")
+    public static JobProgressInfo getProgressInfo() {
+        return operateInClassLoader(
+                jarLoader,
+                () -> {
+                    Method method = sqoopEngineClass.getDeclaredMethod("getProgressInfo");
+                    return (JobProgressInfo) method.invoke(null);
+                },
+                e -> logger.error("Linkis SqoopClient getProgressInfo: {}", getLog(e)));
+    }
+
+    /**
+     * Get metrics
+     *
+     * @return map value
+     */
+    @SuppressWarnings("unchecked")
+    public static Map<String, Object> getMetrics() {
+        return operateInClassLoader(
+                jarLoader,
+                () -> {
+                    Method method = sqoopEngineClass.getDeclaredMethod("getMetrics");
+                    return (Map<String, Object>) method.invoke(null);
+                },
+                e -> logger.error("Linkis SqoopClient getMetrics: {}", getLog(e)));
+    }
+
+    /**
+     * Get diagnosis
+     *
+     * @return map value
+     */
+    @SuppressWarnings("unchecked")
+    public static Map<String, Object> getDiagnosis() {
+        return operateInClassLoader(
+                jarLoader,
+                () -> {
+                    Method method = sqoopEngineClass.getDeclaredMethod("getDiagnosis");
+                    return (Map<String, Object>) method.invoke(null);
+                },
+                e -> logger.error("Linkis SqoopClient getDiagnosis: {}", getLog(e)));
+    }
+
+    /**
+     * Format a throwable's full stack trace as a string
+     *
+     * @param e throwable
+     * @return log text
+     */
+    private static String getLog(Throwable e) {
+        Writer result = new StringWriter();
+        PrintWriter printWriter = new PrintWriter(result);
+        e.printStackTrace(printWriter);
+        // Return the captured stack trace rather than just e.toString()
+        return result.toString();
+    }
+
+    /**
+     * Operate in special classloader
+     *
+     * @param classLoader classloader
+     * @param operation operation
+     * @param resolver resolver
+     * @param <R> return type
+     * @return return
+     */
+    private static <R> R operateInClassLoader(
+            ClassLoader classLoader, ClientOperation<R> operation, Consumer<Throwable> resolver) {
+        ClassLoader currentLoader = Thread.currentThread().getContextClassLoader();
+        R result = null;
+        try {
+            Thread.currentThread().setContextClassLoader(classLoader);
+            result = operation.operate();
+        } catch (Exception t) {
+            resolver.accept(t);
+        } finally {
+            Thread.currentThread().setContextClassLoader(currentLoader);
+        }
+        return result;
+    }
+
+    @FunctionalInterface
+    interface ClientOperation<T> {
+
+        /**
+         * Operate
+         *
+         * @return T
+         * @throws ErrorException error exception
+         */
+        T operate()
+                throws ErrorException, NoSuchMethodException, InvocationTargetException,
+                        IllegalAccessException;
+    }
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java
new file mode 100644
index 000000000..bb1e0c093
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/Sqoop.java
@@ -0,0 +1,551 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.client;
+
+import org.apache.linkis.engineconnplugin.sqoop.client.config.ParamsMapping;
+import org.apache.linkis.engineconnplugin.sqoop.client.exception.JobClosableException;
+import org.apache.linkis.engineconnplugin.sqoop.context.SqoopEnvConfiguration;
+import org.apache.linkis.engineconnplugin.sqoop.context.SqoopParamsConfiguration;
+import org.apache.linkis.protocol.engine.JobProgressInfo;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.mapred.TIPStatus;
+import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.sqoop.manager.SqlManager;
+import org.apache.sqoop.manager.oracle.OraOopManagerFactory;
+import org.apache.sqoop.util.LoggingUtils;
+
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.manager.DefaultManagerFactory;
+import com.cloudera.sqoop.tool.SqoopTool;
+import com.cloudera.sqoop.util.OptionsFileUtil;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.net.MalformedURLException;
+import java.nio.file.Paths;
+import java.sql.SQLException;
+import java.util.*;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * Main entry-point for Sqoop. Usage: hadoop jar (this_jar_name) com.cloudera.sqoop.Sqoop
+ * (options). See the SqoopOptions class for options.
+ */
+public class Sqoop extends Configured implements Tool {
+
+    public static final Log LOG = LogFactory.getLog(Sqoop.class.getName());
+    public static volatile AtomicReference<Job> job = new AtomicReference<>();
+    public static SqlManager sqlManager;
+    public static final String[] DEFAULT_FACTORY_CLASS_NAMES_ARR = {
+        OraOopManagerFactory.class.getName(), DefaultManagerFactory.class.getName(),
+    };
+    public static final String FACTORY_CLASS_NAMES_KEY = "sqoop.connection.factories";
+    public static final String METRICS_RUN_TIME = "MetricsRunTime";
+    private static Float progress = 0.0f;
+
+    /**
+     * If this System property is set, always throw an exception, do not just exit with status 1.
+     */
+    public static final String SQOOP_RETHROW_PROPERTY = "sqoop.throwOnError";
+
+    /** The option to specify an options file from which other options to the tool are read. */
+    public static final String SQOOP_OPTIONS_FILE_SPECIFIER = "--options-file";
+
+    static {
+        Configuration.addDefaultResource("sqoop-site.xml");
+    }
+
+    private SqoopTool tool;
+    private SqoopOptions options;
+    private String[] childPrgmArgs;
+
+    /**
+     * Creates a new instance of Sqoop set to run the supplied SqoopTool with the default
+     * configuration.
+     *
+     * @param tool the SqoopTool to run in the main body of Sqoop.
+     */
+    public Sqoop(SqoopTool tool) {
+        this(tool, (Configuration) null);
+    }
+
+    /**
+     * Creates a new instance of Sqoop set to run the supplied SqoopTool with the provided
+     * configuration.
+     *
+     * @param tool the SqoopTool to run in the main body of Sqoop.
+     * @param conf the Configuration to use (e.g., from ToolRunner).
+     */
+    public Sqoop(SqoopTool tool, Configuration conf) {
+        this(tool, conf, new SqoopOptions());
+    }
+
+    /**
+     * Creates a new instance of Sqoop set to run the supplied SqoopTool with the provided
+     * configuration and SqoopOptions.
+     *
+     * @param tool the SqoopTool to run in the main body of Sqoop.
+     * @param conf the Configuration to use (e.g., from ToolRunner).
+     * @param opts the SqoopOptions which control the tool's parameters.
+     */
+    public Sqoop(SqoopTool tool, Configuration conf, SqoopOptions opts) {
+        /*LOG.info("Running Sqoop version: " + new SqoopVersion().VERSION);*/
+
+        if (null != conf) {
+            setConf(conf);
+        }
+
+        this.options = opts;
+        this.options.setConf(getConf());
+
+        this.tool = tool;
+    }
+
+    /** @return the SqoopOptions used in this Sqoop instance. */
+    public SqoopOptions getOptions() {
+        return this.options;
+    }
+
+    /** @return the SqoopTool used in this Sqoop instance. */
+    public SqoopTool getTool() {
+        return this.tool;
+    }
+
+    /** Actual main entry-point for the program. */
+    @Override
+    public int run(String[] args) {
+        if (options.getConf() == null) {
+            options.setConf(getConf());
+        }
+        options.getConf().setStrings(FACTORY_CLASS_NAMES_KEY, DEFAULT_FACTORY_CLASS_NAMES_ARR);
+        try {
+            options = tool.parseArguments(args, null, options, false);
+            tool.appendArgs(this.childPrgmArgs);
+            tool.validateOptions(options);
+            if (options.getVerbose()) {
+                LoggingUtils.setDebugLevel();
+            }
+        } catch (Exception e) {
+            LOG.error(e.getMessage(), e);
+            System.err.println(e.getMessage());
+            return 1;
+        }
+        return tool.run(options);
+    }
+
+    /**
+     * SqoopTools sometimes pass arguments to a child program (e.g., mysqldump). Users can specify
+     * additional args to these programs by preceding the additional arguments with a standalone
+     * '--'; but ToolRunner/GenericOptionsParser will cull out this argument. We remove the
+     * child-program arguments in advance, and store them to be re-added later.
+     *
+     * @param argv the argv in to the SqoopTool
+     * @return the argv with a "--" and any subsequent arguments removed.
+     */
+    private String[] stashChildPrgmArgs(String[] argv) {
+        for (int i = 0; i < argv.length; i++) {
+            if ("--".equals(argv[i])) {
+                this.childPrgmArgs = Arrays.copyOfRange(argv, i, argv.length);
+                return Arrays.copyOfRange(argv, 0, i);
+            }
+        }
+
+        // Didn't find child-program arguments.
+        return argv;
+    }
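+
+    /*
+     * Example: for argv = ["--connect", "jdbc:mysql://host/db", "--", "--default-character-set=latin1"],
+     * this returns ["--connect", "jdbc:mysql://host/db"] and stashes
+     * ["--", "--default-character-set=latin1"] to be re-appended by tool.appendArgs() in run().
+     */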
+
+    /**
+     * Given a Sqoop object and a set of arguments to deliver to its embedded SqoopTool, run the
+     * tool, wrapping the call to ToolRunner. This entry-point is preferred to ToolRunner.run()
+     * because it has a chance to stash child program arguments before GenericOptionsParser would
+     * remove them.
+     */
+    public static int runSqoop(Sqoop sqoop, String[] args) {
+        String[] toolArgs = sqoop.stashChildPrgmArgs(args);
+        try {
+            return ToolRunner.run(sqoop.getConf(), sqoop, toolArgs);
+        } catch (Exception e) {
+            LOG.error("Got exception running Sqoop: " + e.toString());
+            e.printStackTrace();
+            rethrowIfRequired(toolArgs, e);
+            return 1;
+        }
+    }
+
+    public static void rethrowIfRequired(String[] toolArgs, Exception ex) {
+        final RuntimeException exceptionToThrow;
+        if (ex instanceof RuntimeException) {
+            exceptionToThrow = (RuntimeException) ex;
+        } else {
+            exceptionToThrow = new RuntimeException(ex);
+        }
+
+        throw exceptionToThrow;
+    }
+
+    /**
+     * Entry-point that parses the correct SqoopTool to use from the args, but does not call
+     * System.exit() as main() will.
+     */
+    public static int runTool(Map<String, String> argsMap, Configuration conf) {
+
+        // Expand the options
+        String[] expandedArgs = null;
+        try {
+            String[] flatArgs = convertParamsMapToArray(argsMap, conf);
+            expandedArgs = OptionsFileUtil.expandArguments(flatArgs);
+        } catch (Exception ex) {
+            LOG.error("Error while expanding arguments", ex);
+            System.err.println(ex.getMessage());
+            System.err.println("Try 'sqoop help' for usage.");
+            return 1;
+        }
+
+        String toolName = expandedArgs[0];
+        Configuration pluginConf = SqoopTool.loadPlugins(conf);
+        SqoopTool tool = SqoopTool.getTool(toolName);
+        if (null == tool) {
+            System.err.println("No such sqoop tool: " + toolName + ". See 'sqoop help'.");
+            return 1;
+        }
+
+        Sqoop sqoop = new Sqoop(tool, pluginConf);
+        return runSqoop(sqoop, Arrays.copyOfRange(expandedArgs, 1, expandedArgs.length));
+    }
+
+    private static String[] convertParamsMapToArray(
+            Map<String, String> paramsMap, Configuration conf) throws Exception {
+        List<String> paramsList = new ArrayList<>();
+
+        for (Map.Entry<String, String> entry : paramsMap.entrySet()) {
+            if (StringUtils.isNotBlank(entry.getKey())) {
+                String key = entry.getKey().toLowerCase();
+                if (key.equals(SqoopParamsConfiguration.SQOOP_PARAM_MODE().getValue())) {
+                    paramsList.add(0, entry.getValue());
+                    continue;
+                }
+                if (key.startsWith(SqoopParamsConfiguration.SQOOP_PARAM_ENV_PREFIX().getValue())) {
+                    key =
+                            key.substring(
+                                    SqoopParamsConfiguration.SQOOP_PARAM_ENV_PREFIX()
+                                            .getValue()
+                                            .length());
+                    conf.set(key, entry.getValue());
+                    continue;
+                }
+                String conKey = ParamsMapping.mapping.get(key);
+                if (conKey != null) {
+                    if (entry.getValue() != null && entry.getValue().length() != 0) {
+                        paramsList.add(conKey);
+                        paramsList.add(entry.getValue());
+                    } else {
+                        paramsList.add(conKey);
+                    }
+                } else {
+                    // Ignore the unrecognized params
+                    LOG.warn("The Key " + entry.getKey() + " Is Not Supported");
+                }
+            }
+        }
+        return paramsList.toArray(new String[0]);
+    }
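+
+    /*
+     * Sketch of the conversion, assuming the configured param prefix is "sqoop.args."
+     * and the mode key is "sqoop.mode" (both actually come from SqoopParamsConfiguration):
+     *
+     *   {"sqoop.mode": "import", "sqoop.args.table": "t_user", "sqoop.args.verbose": ""}
+     *     => ["import", "--table", "t_user", "--verbose"]
+     *
+     * The mode value is moved to index 0 (the tool name), and an empty value emits the
+     * mapped flag alone.
+     */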
+
+    /**
+     * Entry-point that parses the correct SqoopTool to use from the args, but does not call
+     * System.exit() as main() will.
+     */
+    public static int runTool(Map<String, String> params) {
+        Configuration conf = new Configuration();
+        try {
+            for (String fileName :
+                    SqoopEnvConfiguration.SQOOP_HADOOP_SITE_FILE().getValue().split(";")) {
+                File resourceFile = Paths.get(fileName).toFile();
+                if (resourceFile.exists()) {
+                    LOG.info("Append resource: [" + resourceFile.getPath() + "] to configuration");
+                    conf.addResource(resourceFile.toURI().toURL());
+                }
+            }
+
+        } catch (MalformedURLException e) {
+            LOG.error("Invalid URL for hadoop site file", e);
+            System.exit(1);
+        }
+        return runTool(params, conf);
+    }
+
+    public static int main(Map<String, String> params) {
+        return runTool(params);
+    }
+
+    /**
+     * Close the running MapReduce job and the cached sql connection
+     *
+     * @throws JobClosableException if the job or its connection cannot be closed
+     */
+    public static void close() throws JobClosableException {
+        Job runnableJob = job.get();
+        try {
+            if (Objects.nonNull(runnableJob)) {
+                runnableJob.killJob();
+            }
+            if (sqlManager != null && sqlManager.getConnection() != null) {
+                sqlManager.getConnection().close();
+            }
+        } catch (IllegalStateException se) {
+            if (isJobReady(runnableJob)) {
+                LOG.warn(
+                        "Unable to close the mapReduce job, it seems that the job isn't connected to the cluster");
+            } else if (Objects.nonNull(runnableJob)) {
+                String cluster = "UNKNOWN";
+                try {
+                    cluster = runnableJob.getCluster().getFileSystem().getCanonicalServiceName();
+                } catch (Exception e) {
+                    // Ignore
+                }
+                throw new JobClosableException(
+                        "Unable to close the mapReduce job related to cluster [" + cluster + "]",
+                        se);
+            }
+        } catch (IOException | SQLException e) {
+            throw new JobClosableException("Error in closing sqoop client", e);
+        }
+    }
+
+    /**
+     * Get application id
+     *
+     * @return string value
+     */
+    public static String getApplicationId() {
+        String applicationId = "";
+        try {
+            Job runnableJob = job.get();
+            if (Objects.nonNull(runnableJob)) {
+                JobID jobId = runnableJob.getJobID();
+                if (Objects.nonNull(jobId)) {
+                    applicationId = jobId.toString();
+                }
+            }
+        } catch (Exception e) {
+            // Not throw exception
+            LOG.error("GetApplicationId in sqoop Error", e);
+        }
+        return applicationId;
+    }
+
+    /**
+     * Get application url
+     *
+     * @return url
+     */
+    public static String getApplicationURL() {
+        String applicationUrl = "";
+        Job runnableJob = job.get();
+        try {
+            if (Objects.nonNull(runnableJob)) {
+                return runnableJob.getTrackingURL();
+            }
+        } catch (Exception e) {
+            if (e instanceof IllegalStateException && !isJobReady(runnableJob)) {
+                LOG.trace("The mapReduce job is not ready, wait for the job status to be Running");
+            } else {
+                LOG.error("GetApplicationURL in sqoop Error", e);
+            }
+        }
+        return applicationUrl;
+    }
+
+    /**
+     * Get progress value
+     *
+     * @return float value
+     */
+    public static Float progress() {
+        Job runnableJob = job.get();
+        try {
+            if (Objects.nonNull(runnableJob)) {
+                // Average the progress of the map and reduce phases
+                progress = (runnableJob.mapProgress() + runnableJob.reduceProgress()) / 2.0f;
+            }
+        } catch (Exception e) {
+            if (e instanceof IllegalStateException && !isJobReady(runnableJob)) {
+                LOG.trace("The mapReduce job is not ready, the value of progress is 0.0 always");
+            } else {
+                LOG.error("Get progress in sqoop Error", e);
+            }
+        }
+        return progress;
+    }
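+
+    /*
+     * Example: mapProgress() = 1.0 and reduceProgress() = 0.2 yield an overall
+     * progress of (1.0 + 0.2) / 2.0 = 0.6.
+     */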
+
+    /**
+     * Get progress info
+     *
+     * @return info
+     */
+    public static JobProgressInfo getProgressInfo() {
+        Job runnableJob = job.get();
+        try {
+            if (Objects.nonNull(runnableJob)) {
+                AtomicInteger totalTasks = new AtomicInteger();
+                AtomicInteger failedTasks = new AtomicInteger();
+                AtomicInteger runTasks = new AtomicInteger();
+                AtomicInteger successTasks = new AtomicInteger();
+                TaskType[] analyzeTypes = new TaskType[] {TaskType.MAP, TaskType.REDUCE};
+                for (TaskType taskType : analyzeTypes) {
+                    TaskReport[] taskReports = runnableJob.getTaskReports(taskType);
+                    Optional.ofNullable(taskReports)
+                            .ifPresent(
+                                    reports -> {
+                                        totalTasks.addAndGet(reports.length);
+                                        for (TaskReport report : reports) {
+                                            TIPStatus tipStatus = report.getCurrentStatus();
+                                            switch (tipStatus) {
+                                                case FAILED:
+                                                case KILLED:
+                                                    failedTasks.getAndIncrement();
+                                                    break;
+                                                case PENDING:
+                                                case RUNNING:
+                                                    runTasks.getAndIncrement();
+                                                    break;
+                                                case COMPLETE:
+                                                    successTasks.getAndIncrement();
+                                                    break;
+                                                default:
+                                            }
+                                        }
+                                    });
+                }
+                return new JobProgressInfo(
+                        getApplicationId(),
+                        totalTasks.get(),
+                        runTasks.get(),
+                        failedTasks.get(),
+                        successTasks.get());
+            }
+        } catch (Exception e) {
+            if (e instanceof IllegalStateException && !isJobReady(runnableJob)) {
+                LOG.trace(
+                        "The mapReduce job is not ready, the value of progressInfo is always empty");
+            } else {
+                LOG.error("Get progress info in sqoop Error", e);
+            }
+        }
+        return new JobProgressInfo(getApplicationId(), 0, 0, 0, 0);
+    }
+
+    /**
+     * Get metrics
+     *
+     * @return metrics map
+     */
+    public static Map<String, Object> getMetrics() {
+        Job runnableJob = job.get();
+        // Actual the counter map
+        Map<String, Object> metricsMap = new HashMap<>();
+        try {
+            if (Objects.nonNull(runnableJob)) {
+                Counters counters = runnableJob.getCounters();
+                counters.forEach(
+                        group ->
+                                metricsMap.computeIfAbsent(
+                                        group.getName(),
+                                        (groupName) -> {
+                                            Map<String, Object> map = new HashMap<>();
+                                            group.forEach(
+                                                    counter ->
+                                                            map.put(
+                                                                    counter.getName(),
+                                                                    counter.getValue()));
+                                            return map;
+                                        }));
+                long startTime = runnableJob.getStartTime();
+                long endTime =
+                        runnableJob.getFinishTime() > 0
+                                ? runnableJob.getFinishTime()
+                                : System.currentTimeMillis();
+                // Analyze the run time
+                metricsMap.put(METRICS_RUN_TIME, startTime > 0 ? endTime - startTime : 0);
+            }
+        } catch (Exception e) {
+            if (e instanceof IllegalStateException && !isJobReady(runnableJob)) {
+                LOG.trace(
+                        "The mapReduce job is not ready, the value of metricsMap is always empty");
+            } else {
+                LOG.error("Get metrics info in sqoop Error", e);
+            }
+        }
+        return metricsMap;
+    }
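+
+    /*
+     * Shape of the returned map (group and counter names depend on the MR framework;
+     * the values here are illustrative only):
+     *
+     *   {
+     *     "org.apache.hadoop.mapreduce.TaskCounter": {"MAP_INPUT_RECORDS": 1000, ...},
+     *     "MetricsRunTime": 52341
+     *   }
+     */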
+
+    /**
+     * Get diagnosis
+     *
+     * @return map of task id to diagnostic messages
+     */
+    public static Map<String, Object> getDiagnosis() {
+        Job runnableJob = job.get();
+        Map<String, Object> diagnosis = new HashMap<>();
+        try {
+            if (Objects.nonNull(runnableJob)) {
+                TaskType[] analyzeTypes = new TaskType[] {TaskType.MAP, TaskType.REDUCE};
+                List<TaskReport> listReports = new ArrayList<>();
+                for (TaskType taskType : analyzeTypes) {
+                    listReports.addAll(Arrays.asList(runnableJob.getTaskReports(taskType)));
+                }
+                listReports.forEach(
+                        report -> diagnosis.put(report.getTaskId(), report.getDiagnostics()));
+            }
+        } catch (Exception e) {
+            if (e instanceof IllegalStateException && !isJobReady(runnableJob)) {
+                LOG.trace("The mapReduce job is not ready, the value of diagnosis is always empty");
+            } else {
+                LOG.error("Get diagnosis info in sqoop Error", e);
+            }
+        }
+        return diagnosis;
+    }
+
+    /**
+     * Whether the job has been submitted and is in the RUNNING state
+     *
+     * @param runnableJob job
+     * @return true if the job state is RUNNING
+     */
+    private static boolean isJobReady(Job runnableJob) {
+        boolean ready = false;
+        try {
+            Field stateField = Job.class.getDeclaredField("state");
+            stateField.setAccessible(true);
+            Job.JobState state = (Job.JobState) stateField.get(runnableJob);
+            ready = state.equals(Job.JobState.RUNNING);
+        } catch (NoSuchFieldException | IllegalAccessException e) {
+            // Ignore
+        }
+        return ready;
+    }
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ExecutionContext.java b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ExecutionContext.java
new file mode 100644
index 000000000..4dde08a76
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ExecutionContext.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.client.config;
+
+import org.apache.linkis.engineconn.common.creation.EngineCreationContext;
+
+public class ExecutionContext {
+    private final EngineCreationContext environmentContext;
+
+    public ExecutionContext(EngineCreationContext environmentContext) {
+        this.environmentContext = environmentContext;
+    }
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java
new file mode 100644
index 000000000..d52d68176
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.client.config;
+
+import org.apache.linkis.engineconnplugin.sqoop.context.SqoopParamsConfiguration;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/** Mapping from Linkis task parameter keys to Sqoop command-line options */
+public final class ParamsMapping {
+    public static final Map<String, String> mapping;
+
+    static {
+        String paramPrefix = SqoopParamsConfiguration.SQOOP_PARAM_PREFIX().getValue();
+        mapping = new HashMap<>();
+        mapping.put(paramPrefix + "connect", "--connect");
+        mapping.put(paramPrefix + "connection.manager", "--connection-manager");
+        mapping.put(paramPrefix + "connection.param.file", "--connection-param-file");
+        mapping.put(paramPrefix + "driver", "--driver");
+        mapping.put(paramPrefix + "hadoop.home", "--hadoop-home");
+        mapping.put(paramPrefix + "hadoop.mapred.home", "--hadoop-mapred-home");
+        mapping.put(paramPrefix + "help", "help");
+        mapping.put(paramPrefix + "password", "--password");
+        mapping.put(paramPrefix + "password.alias", "--password-alias");
+        mapping.put(paramPrefix + "password.file", "--password-file");
+        mapping.put(paramPrefix + "relaxed.isolation", "--relaxed-isolation");
+        mapping.put(paramPrefix + "skip.dist.cache", "--skip-dist-cache");
+        mapping.put(paramPrefix + "username", "--username");
+        mapping.put(paramPrefix + "verbose", "--verbose");
+        mapping.put(paramPrefix + "append", "--append");
+        mapping.put(paramPrefix + "as.avrodatafile", "--as-avrodatafile");
+        mapping.put(paramPrefix + "as.parquetfile", "--as-parquetfile");
+        mapping.put(paramPrefix + "as.sequencefile", "--as-sequencefile");
+        mapping.put(paramPrefix + "as.textfile", "--as-textfile");
+        mapping.put(paramPrefix + "autoreset.to.one.mapper", "--autoreset-to-one-mapper");
+        mapping.put(paramPrefix + "boundary.query", "--boundary-query");
+        mapping.put(paramPrefix + "case.insensitive", "--case-insensitive");
+        mapping.put(paramPrefix + "columns", "--columns");
+        mapping.put(paramPrefix + "compression.codec", "--compression-codec");
+        mapping.put(paramPrefix + "delete.target.dir", "--delete-target-dir");
+        mapping.put(paramPrefix + "direct", "--direct");
+        mapping.put(paramPrefix + "direct.split.size", "--direct-split-size");
+        mapping.put(paramPrefix + "query", "--query");
+        mapping.put(paramPrefix + "fetch.size", "--fetch-size");
+        mapping.put(paramPrefix + "inline.lob.limit", "--inline-lob-limit");
+        mapping.put(paramPrefix + "num.mappers", "--num-mappers");
+        mapping.put(paramPrefix + "mapreduce.job.name", "--mapreduce-job-name");
+        mapping.put(paramPrefix + "merge.key", "--merge-key");
+        mapping.put(paramPrefix + "split.by", "--split-by");
+        mapping.put(paramPrefix + "table", "--table");
+        mapping.put(paramPrefix + "target.dir", "--target-dir");
+        mapping.put(paramPrefix + "validate", "--validate");
+        mapping.put(paramPrefix + "validation.failurehandler", "--validation-failurehandler");
+        mapping.put(paramPrefix + "validation.threshold", " --validation-threshold");
+        mapping.put(paramPrefix + "validator", "--validator");
+        mapping.put(paramPrefix + "warehouse.dir", "--warehouse-dir");
+        mapping.put(paramPrefix + "where", "--where");
+        mapping.put(paramPrefix + "compress", "--compress");
+        mapping.put(paramPrefix + "check.column", "--check-column");
+        mapping.put(paramPrefix + "incremental", "--incremental");
+        mapping.put(paramPrefix + "last.value", "--last-value");
+        mapping.put(paramPrefix + "enclosed.by", "--enclosed-by");
+        mapping.put(paramPrefix + "escaped.by", "--escaped-by");
+        mapping.put(paramPrefix + "fields.terminated.by", "--fields-terminated-by");
+        mapping.put(paramPrefix + "lines.terminated.by", "--lines-terminated-by");
+        mapping.put(paramPrefix + "mysql.delimiters", "--mysql-delimiters");
+        mapping.put(paramPrefix + "optionally.enclosed.by", "--optionally-enclosed-by");
+        mapping.put(paramPrefix + "input.enclosed.by", "--input-enclosed-by");
+        mapping.put(paramPrefix + "input.escaped.by", "--input-escaped-by");
+        mapping.put(paramPrefix + "input.fields.terminated.by", "--input-fields-terminated-by");
+        mapping.put(paramPrefix + "input.lines.terminated.by", "--input-lines-terminated-by");
+        mapping.put(paramPrefix + "input.optionally.enclosed.by", "--input-optionally-enclosed-by");
+        mapping.put(paramPrefix + "create.hive.table", "--create-hive-table");
+        mapping.put(paramPrefix + "hive.delims.replacement", "--hive-delims-replacement");
+        mapping.put(paramPrefix + "hive.database", "--hive-database");
+        mapping.put(paramPrefix + "hive.drop.import.delims", "--hive-drop-import-delims");
+        mapping.put(paramPrefix + "hive.home", "--hive-home");
+        mapping.put(paramPrefix + "hive.import", "--hive-import");
+        mapping.put(paramPrefix + "hive.overwrite", "--hive-overwrite");
+        mapping.put(paramPrefix + "hive.partition.value", "--hive-partition-value");
+        mapping.put(paramPrefix + "hive.table", "--hive-table");
+        mapping.put(paramPrefix + "column.family", "--column-family");
+        mapping.put(paramPrefix + "hbase.bulkload", "--hbase-bulkload");
+        mapping.put(paramPrefix + "hbase.create.table", "--hbase-create-table");
+        mapping.put(paramPrefix + "hbase.row.key", "--hbase-row-key");
+        mapping.put(paramPrefix + "hbase.table", "--hbase-table");
+        mapping.put(paramPrefix + "hcatalog.database", "--hcatalog-database");
+        mapping.put(paramPrefix + "hcatalog.home", "--hcatalog-home");
+        mapping.put(paramPrefix + "hcatalog.partition.keys", "--hcatalog-partition-keys");
+        mapping.put(paramPrefix + "hcatalog.partition.values", "--hcatalog-partition-values");
+        mapping.put(paramPrefix + "hcatalog.table", "--hcatalog-table");
+        mapping.put(paramPrefix + "hive.partition.key", "--hive-partition-key");
+        mapping.put(paramPrefix + "map.column.hive", "--map-column-hive");
+        mapping.put(paramPrefix + "create.hcatalog.table", "--create-hcatalog-table");
+        mapping.put(paramPrefix + "hcatalog.storage.stanza", "--hcatalog-storage-stanza");
+        mapping.put(paramPrefix + "accumulo.batch.size", "--accumulo-batch-size");
+        mapping.put(paramPrefix + "accumulo.column.family", "--accumulo-column-family");
+        mapping.put(paramPrefix + "accumulo.create.table", "--accumulo-create-table");
+        mapping.put(paramPrefix + "accumulo.instance", "--accumulo-instance");
+        mapping.put(paramPrefix + "accumulo.max.latency", "--accumulo-max-latency");
+        mapping.put(paramPrefix + "accumulo.password", "--accumulo-password");
+        mapping.put(paramPrefix + "accumulo.row.key", "--accumulo-row-key");
+        mapping.put(paramPrefix + "accumulo.table", "--accumulo-table");
+        mapping.put(paramPrefix + "accumulo.user", "--accumulo-user");
+        mapping.put(paramPrefix + "accumulo.visibility", "--accumulo-visibility");
+        mapping.put(paramPrefix + "accumulo.zookeepers", "--accumulo-zookeepers");
+        mapping.put(paramPrefix + "bindir", "--bindir");
+        mapping.put(paramPrefix + "class.name", "--class-name");
+        mapping.put(paramPrefix + "input.null.non.string", "--input-null-non-string");
+        mapping.put(paramPrefix + "input.null.string", "--input-null-string");
+        mapping.put(paramPrefix + "jar.file", "--jar-file");
+        mapping.put(paramPrefix + "map.column.java", "--map-column-java");
+        mapping.put(paramPrefix + "null.non.string", "--null-non-string");
+        mapping.put(paramPrefix + "null.string", "--null-string");
+        mapping.put(paramPrefix + "outdir", "--outdir");
+        mapping.put(paramPrefix + "package.name", "--package-name");
+        mapping.put(paramPrefix + "conf", "-conf");
+        mapping.put(paramPrefix + "D", "-D");
+        mapping.put(paramPrefix + "fs", "-fs");
+        mapping.put(paramPrefix + "jt", "-jt");
+        mapping.put(paramPrefix + "files", "-files");
+        mapping.put(paramPrefix + "libjars", "-libjars");
+        mapping.put(paramPrefix + "archives", "-archives");
+        mapping.put(paramPrefix + "update.key", "--update-key");
+        mapping.put(paramPrefix + "update.mode", "--update-mode");
+        mapping.put(paramPrefix + "export.dir", "--export-dir");
+    }
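+
+    /*
+     * Illustrative lookup, assuming the configured prefix is "sqoop.args.":
+     *   mapping.get("sqoop.args.table") => "--table"
+     * Keys with no mapping are ignored by the caller (see Sqoop#convertParamsMapToArray).
+     */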
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java
new file mode 100644
index 000000000..efbcd0e6b
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobClosableException.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.client.exception;
+
+import org.apache.linkis.common.exception.ErrorException;
+
+/** Exception in closing/destroying the job */
+public class JobClosableException extends ErrorException {
+    private static final long serialVersionUID = 1L;
+
+    public static final int ERROR_CODE = 16025;
+
+    public JobClosableException(String message) {
+        super(ERROR_CODE, message);
+    }
+
+    public JobClosableException(String message, Throwable e) {
+        super(ERROR_CODE, message);
+        this.initCause(e);
+    }
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java
new file mode 100644
index 000000000..168d54ff3
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/exception/JobExecutionException.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.client.exception;
+
+import org.apache.linkis.common.exception.ErrorException;
+
+public class JobExecutionException extends ErrorException {
+    private static final long serialVersionUID = 1L;
+
+    public static final int ERROR_CODE = 16023;
+
+    public JobExecutionException(String message) {
+        super(ERROR_CODE, message);
+    }
+
+    public JobExecutionException(String message, Throwable e) {
+        super(ERROR_CODE, message);
+        this.initCause(e);
+    }
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/utils/JarLoader.java b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/utils/JarLoader.java
new file mode 100644
index 000000000..6140e68e8
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/utils/JarLoader.java
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.client.utils;
+
+import org.apache.commons.lang3.Validate;
+
+import java.io.File;
+import java.io.FileFilter;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+public class JarLoader extends URLClassLoader {
+
+    public JarLoader(String[] paths) {
+        this(paths, false);
+    }
+
+    public JarLoader(String[] paths, boolean recursive) {
+        this(paths, recursive, JarLoader.class.getClassLoader());
+    }
+
+    public JarLoader(String[] paths, boolean recursive, ClassLoader parent) {
+        super(getURLs(paths, recursive), parent);
+    }
+
+    private static URL[] getURLs(String[] paths, boolean recursive) {
+        List<URL> urls = new ArrayList<>();
+        if (recursive) {
+            List<String> dirs = new ArrayList<>();
+            for (String path : paths) {
+                dirs.add(path);
+                collectDirs(path, dirs);
+            }
+            for (String path : dirs) {
+                urls.addAll(doGetURLs(path));
+            }
+        } else {
+            // Non-recursive mode: add the given paths directly as URLs
+            urls.addAll(
+                    Arrays.stream(paths)
+                            .map(File::new)
+                            .filter(File::exists)
+                            .map(
+                                    f -> {
+                                        try {
+                                            return f.toURI().toURL();
+                                        } catch (MalformedURLException e) {
+                                            // Ignore
+                                            return null;
+                                        }
+                                    })
+                            .collect(Collectors.toList()));
+        }
+        return urls.toArray(new URL[0]);
+    }
+
+    public void addJarURL(String path) {
+        // Single jar
+        File singleJar = new File(path);
+        if (singleJar.exists() && singleJar.isFile()) {
+            try {
+                this.addURL(singleJar.toURI().toURL());
+            } catch (MalformedURLException e) {
+                // Ignore
+            }
+        }
+    }
+
+    private static void collectDirs(String path, List<String> collector) {
+
+        File current = new File(path);
+        if (!current.exists() || !current.isDirectory()) {
+            return;
+        }
+
+        if (null != current.listFiles()) {
+            for (File child : Objects.requireNonNull(current.listFiles())) {
+                if (!child.isDirectory()) {
+                    continue;
+                }
+
+                collector.add(child.getAbsolutePath());
+                collectDirs(child.getAbsolutePath(), collector);
+            }
+        }
+    }
+
+    private static List<URL> doGetURLs(final String path) {
+
+        File jarPath = new File(path);
+
+        Validate.isTrue(jarPath.exists() && jarPath.isDirectory(), "The jar path must exist and be a directory.");
+
+        /* set filter */
+        FileFilter jarFilter = pathname -> pathname.getName().endsWith(".jar");
+
+        /* iterate all jar */
+        File[] allJars = new File(path).listFiles(jarFilter);
+        assert allJars != null;
+        List<URL> jarURLs = new ArrayList<>(allJars.length);
+
+        for (File allJar : allJars) {
+            try {
+                jarURLs.add(allJar.toURI().toURL());
+            } catch (Exception e) {
+                // Ignore
+            }
+        }
+
+        return jarURLs;
+    }
+
+    /**
+     * Change the loading order to child-first: try this loader before delegating to the parent
+     *
+     * @param name class name
+     * @param resolve whether to resolve the class
+     * @return the loaded class
+     * @throws ClassNotFoundException if neither this loader nor its parents can find the class
+     */
+    @Override
+    public Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
+        synchronized (getClassLoadingLock(name)) {
+            // First, check if the class has already been loaded
+            Class<?> c = findLoadedClass(name);
+            if (c == null) {
+                try {
+                    // Try findClass in this loader first (child-first delegation)
+                    c = findClass(name);
+                } catch (ClassNotFoundException e) {
+                    // Not found locally; fall back to the parent below
+                }
+                if (c == null) {
+                    return super.loadClass(name, resolve);
+                }
+                // sun.misc.PerfCounter bookkeeping is omitted for compatibility with
+                // JDK versions above 1.8.0_141
+            }
+            if (resolve) {
+                resolveClass(c);
+            }
+            return c;
+        }
+    }
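+
+    /*
+     * Child-first effect (illustrative; pluginJarPath is a placeholder for the plugin
+     * jar location):
+     *
+     *   JarLoader loader = new JarLoader(new String[] {pluginJarPath});
+     *   Class<?> c = loader.loadClass("org.apache.sqoop.mapreduce.JobBase", false);
+     *
+     * resolves to the JobBase redefined in the plugin jar rather than the copy visible
+     * to the parent loader.
+     */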
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/sqoop/mapreduce/JobBase.java b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/sqoop/mapreduce/JobBase.java
new file mode 100644
index 000000000..6703c303c
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/java/org/apache/sqoop/mapreduce/JobBase.java
@@ -0,0 +1,410 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sqoop.mapreduce;
+
+import org.apache.linkis.engineconnplugin.sqoop.client.Sqoop;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.sqoop.config.ConfigurationConstants;
+
+import com.cloudera.sqoop.SqoopOptions;
+import com.cloudera.sqoop.config.ConfigurationHelper;
+import com.cloudera.sqoop.manager.ConnManager;
+import com.cloudera.sqoop.tool.SqoopTool;
+import com.cloudera.sqoop.util.ClassLoaderStack;
+import com.cloudera.sqoop.util.Jars;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Base class for configuring and running a MapReduce job. Allows dependency injection, etc, for
+ * easy customization of import job types.
+ */
+public class JobBase {
+
+    public static final Log LOG = LogFactory.getLog(JobBase.class.getName());
+
+    public static final String SERIALIZE_SQOOPOPTIONS = "sqoop.jobbase.serialize.sqoopoptions";
+    public static final boolean SERIALIZE_SQOOPOPTIONS_DEFAULT = false;
+    public static final String HADOOP_MAP_TASK_MAX_ATTEMTPS = "mapreduce.map.maxattempts";
+    public static final String HADOOP_REDUCE_TASK_MAX_ATTEMTPS = "mapreduce.reduce.maxattempts";
+
+    protected SqoopOptions options;
+    protected Class<? extends Mapper> mapperClass;
+    protected Class<? extends InputFormat> inputFormatClass;
+    protected Class<? extends OutputFormat> outputFormatClass;
+
+    private Job mrJob;
+
+    private ClassLoader prevClassLoader = null;
+    protected final boolean isHCatJob;
+
+    public static final String PROPERTY_VERBOSE = "sqoop.verbose";
+
+    public JobBase() {
+        this(null);
+    }
+
+    public JobBase(final SqoopOptions opts) {
+        this(opts, null, null, null);
+    }
+
+    public JobBase(
+            final SqoopOptions opts,
+            final Class<? extends Mapper> mapperClass,
+            final Class<? extends InputFormat> inputFormatClass,
+            final Class<? extends OutputFormat> outputFormatClass) {
+        LOG.debug("SqoopOptions loaded by: " + SqoopOptions.class.getClassLoader());
+        this.options = opts;
+        this.mapperClass = mapperClass;
+        this.inputFormatClass = inputFormatClass;
+        this.outputFormatClass = outputFormatClass;
+        isHCatJob = options.getHCatTableName() != null;
+    }
+
+    /** @return the mapper class to use for the job. */
+    protected Class<? extends Mapper> getMapperClass() throws ClassNotFoundException {
+        return this.mapperClass;
+    }
+
+    /** @return the inputformat class to use for the job. */
+    protected Class<? extends InputFormat> getInputFormatClass() throws ClassNotFoundException {
+        return this.inputFormatClass;
+    }
+
+    /** @return the outputformat class to use for the job. */
+    protected Class<? extends OutputFormat> getOutputFormatClass() throws ClassNotFoundException {
+        return this.outputFormatClass;
+    }
+
+    /** Set the OutputFormat class to use for this job. */
+    public void setOutputFormatClass(Class<? extends OutputFormat> cls) {
+        this.outputFormatClass = cls;
+    }
+
+    /** Set the InputFormat class to use for this job. */
+    public void setInputFormatClass(Class<? extends InputFormat> cls) {
+        this.inputFormatClass = cls;
+    }
+
+    /** Set the Mapper class to use for this job. */
+    public void setMapperClass(Class<? extends Mapper> cls) {
+        this.mapperClass = cls;
+    }
+
+    /** Set the SqoopOptions configuring this job. */
+    public void setOptions(SqoopOptions opts) {
+        this.options = opts;
+    }
+
+    /**
+     * Put jar files required by Sqoop into the DistributedCache.
+     *
+     * @param job the Job being submitted.
+     * @param mgr the ConnManager to use.
+     */
+    protected void cacheJars(Job job, ConnManager mgr) throws IOException {
+        if (options.isSkipDistCache()) {
+            LOG.info("Not adding sqoop jars to distributed cache as requested");
+            return;
+        }
+
+        Configuration conf = job.getConfiguration();
+        FileSystem fs = FileSystem.getLocal(conf);
+        Set<String> localUrls = new HashSet<String>();
+
+        addToCache(Jars.getSqoopJarPath(), fs, localUrls);
+        if (null != mgr) {
+            addToCache(Jars.getDriverClassJar(mgr), fs, localUrls);
+            addToCache(Jars.getJarPathForClass(mgr.getClass()), fs, localUrls);
+        }
+
+        SqoopTool tool = this.options.getActiveSqoopTool();
+        if (null != tool) {
+            // Make sure the jar for the tool itself is on the classpath. (In case
+            // this is a third-party plugin tool.)
+            addToCache(Jars.getJarPathForClass(tool.getClass()), fs, localUrls);
+            List<String> toolDeps = tool.getDependencyJars();
+            if (null != toolDeps) {
+                for (String depFile : toolDeps) {
+                    addToCache(depFile, fs, localUrls);
+                }
+            }
+        }
+
+
+        // Add anything in $SQOOP_HOME/lib, if this is set.
+        String sqoopHome = System.getenv("SQOOP_HOME");
+        if (null != sqoopHome) {
+            File sqoopHomeFile = new File(sqoopHome);
+            File sqoopLibFile = new File(sqoopHomeFile, "lib");
+            if (sqoopLibFile.exists()) {
+                addDirToCache(sqoopLibFile, fs, localUrls);
+            }
+        } else {
+            LOG.warn("SQOOP_HOME is unset. May not be able to find " + "all job dependencies.");
+        }
+
+        // If the user runs a Hive import as Parquet files,
+        // Add anything in $HIVE_HOME/lib.
+        if (options.doHiveImport()
+                && (options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile)) {
+            String hiveHome = options.getHiveHome();
+            if (null != hiveHome) {
+                File hiveHomeFile = new File(hiveHome);
+                File hiveLibFile = new File(hiveHomeFile, "lib");
+                if (hiveLibFile.exists()) {
+                    addDirToCache(hiveLibFile, fs, localUrls);
+                }
+            } else {
+                LOG.warn("HIVE_HOME is unset. Cannot add hive libs as dependencies.");
+            }
+        }
+
+        String tmpjars = conf.get(ConfigurationConstants.MAPRED_DISTCACHE_CONF_PARAM);
+        StringBuilder sb = new StringBuilder();
+
+        // If we didn't put anything in our set, then there's nothing to cache.
+        if (localUrls.isEmpty() && (org.apache.commons.lang.StringUtils.isEmpty(tmpjars))) {
+            return;
+        }
+
+        if (null != tmpjars) {
+            String[] tmpjarsElements = tmpjars.split(",");
+            for (String jarElement : tmpjarsElements) {
+                if (jarElement.isEmpty()) {
+                    warn("Empty input is invalid and was removed from tmpjars.");
+                } else {
+                    sb.append(jarElement);
+                    sb.append(",");
+                }
+            }
+        }
+
+        int lastComma = sb.lastIndexOf(",");
+        if (localUrls.isEmpty() && lastComma >= 0) {
+            sb.deleteCharAt(lastComma);
+        }
+
+        // Add these to the 'tmpjars' array, which the MR JobSubmitter
+        // will upload to HDFS and put in the DistributedCache libjars.
+        sb.append(StringUtils.arrayToString(localUrls.toArray(new String[0])));
+        conf.set(ConfigurationConstants.MAPRED_DISTCACHE_CONF_PARAM, sb.toString());
+    }
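+    // Illustration (editor's note, paths are assumed examples): with an existing
+    // tmpjars of "hdfs:/libs/a.jar" and two cached local jars, the property ends
+    // up as one comma-separated list, e.g.
+    //   tmpjars=hdfs:/libs/a.jar,file:/opt/sqoop/sqoop-1.4.6.jar,file:/opt/lib/driver.jar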
+
+    protected void warn(String message) {
+        LOG.warn(message);
+    }
+
+    private void addToCache(String file, FileSystem fs, Set<String> localUrls) {
+        if (null == file) {
+            return;
+        }
+
+        Path p = new Path(file);
+        String qualified = p.makeQualified(fs).toString();
+        LOG.debug("Adding to job classpath: " + qualified);
+        localUrls.add(qualified);
+    }
+
+    /** Add the .jar elements of a directory to the DCache classpath, nonrecursively. */
+    private void addDirToCache(File dir, FileSystem fs, Set<String> localUrls) {
+        if (null == dir) {
+            return;
+        }
+
+        for (File libfile : dir.listFiles()) {
+            if (libfile.exists() && !libfile.isDirectory() && libfile.getName().endsWith("jar")) {
+                addToCache(libfile.toString(), fs, localUrls);
+            }
+        }
+    }
+
+    /** If jars must be loaded into the local environment, do so here. */
+    protected void loadJars(Configuration conf, String ormJarFile, String tableClassName)
+            throws IOException {
+
+        boolean isLocal =
+                "local".equals(conf.get("mapreduce.jobtracker.address"))
+                        || "local".equals(conf.get("mapred.job.tracker"));
+        if (isLocal) {
+            // If we're using the LocalJobRunner, then instead of using the compiled
+            // jar file as the job source, we're running in the current thread. Push
+            // on another classloader that loads from that jar in addition to
+            // everything currently on the classpath.
+            this.prevClassLoader = ClassLoaderStack.addJarFile(ormJarFile, tableClassName);
+        }
+    }
+
+    /** If any classloader was invoked by loadJars, free it here. */
+    protected void unloadJars() {
+        if (null != this.prevClassLoader) {
+            // unload the special classloader for this jar.
+            ClassLoaderStack.setCurrentClassLoader(this.prevClassLoader);
+        }
+    }
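+    // Typical pairing (illustrative sketch): call loadJars(conf, ormJarFile, tableClassName)
+    // before configuring a LocalJobRunner job, and unloadJars() in a finally block once
+    // the job completes, so the classloader pushed above is always popped again.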
+
+    /** Configure the inputformat to use for the job. */
+    protected void configureInputFormat(
+            Job job, String tableName, String tableClassName, String splitByCol)
+            throws ClassNotFoundException, IOException {
+        // TODO: 'splitByCol' is import-job specific; lift it out of this API.
+        Class<? extends InputFormat> ifClass = getInputFormatClass();
+        LOG.debug("Using InputFormat: " + ifClass);
+        job.setInputFormatClass(ifClass);
+    }
+
+    /** Configure the output format to use for the job. */
+    protected void configureOutputFormat(Job job, String tableName, String tableClassName)
+            throws ClassNotFoundException, IOException {
+        Class<? extends OutputFormat> ofClass = getOutputFormatClass();
+        LOG.debug("Using OutputFormat: " + ofClass);
+        job.setOutputFormatClass(ofClass);
+    }
+
+    /**
+     * Set the mapper class implementation to use in the job, as well as any related configuration
+     * (e.g., map output types).
+     */
+    protected void configureMapper(Job job, String tableName, String tableClassName)
+            throws ClassNotFoundException, IOException {
+        job.setMapperClass(getMapperClass());
+    }
+
+    /**
+     * Configure the number of map/reduce tasks to use in the job, returning the number of map tasks
+     * for backward compatibility.
+     */
+    protected int configureNumTasks(Job job) throws IOException {
+        int numMapTasks = configureNumMapTasks(job);
+        configureNumReduceTasks(job);
+        return numMapTasks;
+    }
+
+    /** Configure the number of map tasks to use in the job. */
+    protected int configureNumMapTasks(Job job) throws IOException {
+        int numMapTasks = options.getNumMappers();
+        if (numMapTasks < 1) {
+            numMapTasks = SqoopOptions.DEFAULT_NUM_MAPPERS;
+            LOG.warn("Invalid mapper count; using " + numMapTasks + " mappers.");
+        }
+        ConfigurationHelper.setJobNumMaps(job, numMapTasks);
+        return numMapTasks;
+    }
+
+    /** Configure the number of reduce tasks to use in the job. */
+    protected int configureNumReduceTasks(Job job) throws IOException {
+        job.setNumReduceTasks(0);
+        return 0;
+    }
+
+    /** Set the main job that will be run. */
+    protected void setJob(Job job) {
+        LOG.info("Customize JobBase Set The Job");
+        mrJob = job;
+        Sqoop.job.set(job);
+    }
+
+    /** @return the main MapReduce job that is being run, or null if no job has started. */
+    public Job getJob() {
+        return mrJob;
+    }
+
+    /**
+     * Create new Job object in unified way for all types of jobs.
+     *
+     * @param configuration Hadoop configuration that should be used
+     * @return New job object, created object won't be persisted in the instance
+     */
+    public Job createJob(Configuration configuration) throws IOException {
+        // Put the SqoopOptions into job if requested
+        if (configuration.getBoolean(SERIALIZE_SQOOPOPTIONS, SERIALIZE_SQOOPOPTIONS_DEFAULT)) {
+            putSqoopOptionsToConfiguration(options, configuration);
+        }
+
+        return new Job(configuration);
+    }
+
+    /**
+     * Iterates over serialized form of SqoopOptions and put them into Configuration object.
+     *
+     * @param opts SqoopOptions that should be serialized
+     * @param configuration Target configuration object
+     */
+    public void putSqoopOptionsToConfiguration(SqoopOptions opts, Configuration configuration) {
+        for (Map.Entry<Object, Object> e : opts.writeProperties().entrySet()) {
+            String key = (String) e.getKey();
+            String value = (String) e.getValue();
+
+            // We don't need to do if(value is empty) because that is already done
+            // for us by the SqoopOptions.writeProperties() method.
+            configuration.set("sqoop.opt." + key, value);
+        }
+    }
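+    // For illustration (assumed key name): an option serialized as
+    // "sqoop.opt.table.name=employee" could be read back in a task with:
+    //   String table = job.getConfiguration().get("sqoop.opt.table.name");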
+
+    /** Actually run the MapReduce job. */
+    protected boolean runJob(Job job)
+            throws ClassNotFoundException, IOException, InterruptedException {
+        return job.waitForCompletion(true);
+    }
+
+    /**
+     * Display a notice on the log that the current MapReduce job has been retired, and thus
+     * Counters are unavailable.
+     *
+     * @param log the Log to display the info to.
+     */
+    protected void displayRetiredJobNotice(Log log) {
+        log.info("The MapReduce job has already been retired. Performance");
+        log.info("counters are unavailable. To get this information, ");
+        log.info("you will need to enable the completed job store on ");
+        log.info("the jobtracker with:");
+        log.info("mapreduce.jobtracker.persist.jobstatus.active = true");
+        log.info("mapreduce.jobtracker.persist.jobstatus.hours = 1");
+        log.info("A jobtracker restart is required for these settings");
+        log.info("to take effect.");
+    }
+
+    /**
+     * Save interesting options to constructed job. Goal here is to propagate some of them to the
+     * job itself, so that they can be easily accessed. We're propagating only interesting global
+     * options (like verbose flag).
+     *
+     * @param job Destination job to save options
+     */
+    protected void propagateOptionsToJob(Job job) {
+        Configuration configuration = job.getConfiguration();
+
+        // So far, propagate only verbose flag
+        configuration.setBoolean(PROPERTY_VERBOSE, options.getVerbose());
+    }
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/resources/linkis-engineconn.properties b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/resources/linkis-engineconn.properties
new file mode 100644
index 000000000..99b76eaea
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/resources/linkis-engineconn.properties
@@ -0,0 +1,22 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+wds.linkis.server.version=v1
+
+wds.linkis.engineconn.plugin.default.class=org.apache.linkis.engineconnplugin.sqoop.SqoopEngineConnPlugin
+
+wds.linkis.engine.connector.hooks=org.apache.linkis.engineconn.computation.executor.hook.ComputationEngineConnHook
+# wds.linkis.hadoop.site.xml=
\ No newline at end of file
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/resources/log4j2.xml
new file mode 100644
index 000000000..3b45ae2a1
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/resources/log4j2.xml
@@ -0,0 +1,82 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<configuration status="error" monitorInterval="30">
+    <appenders>
+        <Console name="Console" target="SYSTEM_OUT">
+            <ThresholdFilter level="INFO" onMatch="ACCEPT" onMismatch="DENY"/>
+            <PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%t] %logger{36} %L %M - %msg%xEx%n"/>
+        </Console>
+
+        <Send name="Send" >
+            <Filters>
+                <ThresholdFilter level="WARN" onMatch="ACCEPT" onMismatch="DENY" />
+            </Filters>
+            <PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%t] %logger{36} %L %M - %msg%xEx%n"/>
+        </Send>
+
+        <File name="stderr" fileName="${env:PWD}/logs/stderr" append="true">
+            <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %class{36} %L %M - %msg%xEx%n"/>
+        </File>
+    </appenders>
+    <loggers>
+        <root level="INFO">
+            <!--<appender-ref ref="RollingFile"/>-->
+            <appender-ref ref="Console"/>
+            <appender-ref ref="Send"/>
+        </root>
+        <logger name="org.springframework.boot.diagnostics.LoggingFailureAnalysisReporter " level="error" additivity="true">
+            <appender-ref ref="stderr"/>
+        </logger>
+        <logger name="com.netflix.discovery" level="warn" additivity="true">
+            <appender-ref ref="Send"/>
+        </logger>
+        <logger name="org.apache.hadoop.yarn" level="warn" additivity="true">
+            <appender-ref ref="Send"/>
+        </logger>
+        <logger name="org.springframework" level="warn" additivity="true">
+            <appender-ref ref="Send"/>
+        </logger>
+        <logger name="org.apache.linkis.server.security" level="warn" additivity="true">
+            <appender-ref ref="Send"/>
+        </logger>
+        <logger name="org.apache.hadoop.hive.ql.exec.mr.ExecDriver" level="warn" additivity="true">
+            <appender-ref ref="Send"/>
+        </logger>
+        <logger name="org.apache.hadoop.hdfs.KeyProviderCache" level="fatal" additivity="true">
+            <appender-ref ref="Send"/>
+        </logger>
+        <logger name="org.spark_project.jetty" level="ERROR" additivity="true">
+            <appender-ref ref="Send"/>
+        </logger>
+        <logger name="org.eclipse.jetty" level="ERROR" additivity="true">
+            <appender-ref ref="Send"/>
+        </logger>
+        <logger name="org.springframework" level="ERROR" additivity="true">
+            <appender-ref ref="Send"/>
+        </logger>
+        <logger name="org.reflections.Reflections" level="ERROR" additivity="true">
+            <appender-ref ref="Send"/>
+        </logger>
+
+        <logger name="org.apache.hadoop.ipc.Client" level="ERROR" additivity="true">
+            <appender-ref ref="Send"/>
+        </logger>
+
+    </loggers>
+</configuration>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/SqoopEngineConnPlugin.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/SqoopEngineConnPlugin.scala
new file mode 100644
index 000000000..52266bb71
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/SqoopEngineConnPlugin.scala
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop
+
+import org.apache.linkis.manager.engineplugin.common.EngineConnPlugin
+import org.apache.linkis.manager.engineplugin.common.creation.EngineConnFactory
+import org.apache.linkis.manager.engineplugin.common.launch.EngineConnLaunchBuilder
+import org.apache.linkis.manager.engineplugin.common.resource.{EngineResourceFactory, GenericEngineResourceFactory}
+import org.apache.linkis.manager.label.entity.Label
+import org.apache.linkis.engineconnplugin.sqoop.factory.SqoopEngineConnFactory
+import org.apache.linkis.engineconnplugin.sqoop.launch.SqoopEngineConnLaunchBuilder
+
+
+class SqoopEngineConnPlugin extends EngineConnPlugin {
+  private val EP_CONTEXT_CONSTRUCTOR_LOCK = new Object()
+  private var engineResourceFactory: EngineResourceFactory = _
+  private var engineConnLaunchBuilder: EngineConnLaunchBuilder = _
+  private var engineConnFactory: EngineConnFactory = _
+  override def init(params: java.util.Map[String, Any]): Unit = {}
+
+  override def getEngineResourceFactory: EngineResourceFactory = {
+    EP_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
+      if (null == engineResourceFactory) {
+        engineResourceFactory = new GenericEngineResourceFactory
+      }
+      engineResourceFactory
+    }
+  }
+
+  override def getEngineConnLaunchBuilder: EngineConnLaunchBuilder = {
+    EP_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
+      if (null == engineConnLaunchBuilder) {
+        engineConnLaunchBuilder = new SqoopEngineConnLaunchBuilder()
+      }
+      engineConnLaunchBuilder
+    }
+  }
+
+
+  override def getEngineConnFactory: EngineConnFactory = {
+    EP_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
+      if (null == engineConnFactory) {
+        engineConnFactory = new SqoopEngineConnFactory
+      }
+      engineConnFactory
+    }
+  }
+
+  override def getDefaultLabels: java.util.List[Label[_]] = new java.util.ArrayList[Label[_]]
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEngineConnContext.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEngineConnContext.scala
new file mode 100644
index 000000000..be1d340db
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEngineConnContext.scala
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.context
+
+import org.apache.linkis.engineconnplugin.sqoop.client.config.ExecutionContext
+
+class SqoopEngineConnContext {
+  private var executionContext: ExecutionContext = _
+
+  def getExecutionContext: ExecutionContext = executionContext
+
+  def setExecutionContext(executionContext: ExecutionContext): Unit = this.executionContext = executionContext
+
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEnvConfiguration.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEnvConfiguration.scala
new file mode 100644
index 000000000..63417dbca
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopEnvConfiguration.scala
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.context
+
+import org.apache.linkis.common.conf.{CommonVars, TimeType}
+
+object SqoopEnvConfiguration {
+
+  val SQOOP_HADOOP_SITE_FILE: CommonVars[String] = CommonVars("wds.linkis.hadoop.site.xml", "core-site.xml;hdfs-site.xml;yarn-site.xml;mapred-site.xml")
+
+  val SQOOP_STATUS_FETCH_INTERVAL: CommonVars[TimeType] = CommonVars("sqoop.fetch.status.interval", new TimeType("5s"))
+
+  val LINKIS_DATASOURCE_SERVICE_NAME: CommonVars[String] = CommonVars("wds.linkis.datasource.service.name", "linkis-ps-data-source-manager")
+
+  val SQOOP_HOME: CommonVars[String] = CommonVars("SQOOP_HOME", "")
+
+  val SQOOP_CONF_DIR: CommonVars[String] = CommonVars("SQOOP_CONF_DIR", "")
+
+  val SQOOP_HCAT_HOME: CommonVars[String] = CommonVars("HCAT_HOME", "")
+
+  val SQOOP_HBASE_HOME: CommonVars[String] = CommonVars("HBASE_HOME", "")
+
+  val SQOOP_ZOOCFGDIR: CommonVars[String] = CommonVars("ZOOCFGDIR", "")
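+
+  // For illustration (editor's note): these CommonVars fall back to the engine's
+  // configuration/environment, so e.g. exporting SQOOP_HOME=/opt/sqoop (an assumed
+  // path) lets the launch builder add /opt/sqoop/*.jar and /opt/sqoop/lib/*.jar
+  // to the classpath.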
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopParamsConfiguration.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopParamsConfiguration.scala
new file mode 100644
index 000000000..0c449a9d4
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopParamsConfiguration.scala
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.context
+
+import org.apache.linkis.common.conf.CommonVars
+
+/**
+ * Sqoop Params Configuration
+ */
+object SqoopParamsConfiguration {
+
+  val SQOOP_PARAM_MODE: CommonVars[String] = CommonVars("sqoop.params.name.mode", "sqoop.mode")
+
+  val SQOOP_PARAM_HOST: CommonVars[String] = CommonVars("sqoop.params.name.host", "sqoop.args.host")
+
+  val SQOOP_PARAM_PORT: CommonVars[String] = CommonVars("sqoop.params.name.port", "sqoop.args.port")
+
+  val SQOOP_PARAM_CONNECT_PARAMS: CommonVars[String] = CommonVars("sqoop.params.name.params", "sqoop.args.params")
+
+  val SQOOP_PARAM_CONNECT: CommonVars[String] = CommonVars("sqoop.params.name.connect", "sqoop.args.connect")
+
+  val SQOOP_PARAM_DATA_SOURCE: CommonVars[String] = CommonVars("sqoop.params.name.data-source", "sqoop.args.datasource.name")
+
+  val SQOOP_PARAM_PREFIX: CommonVars[String] = CommonVars("sqoop.params.name.prefix", "sqoop.args.")
+
+  val SQOOP_PARAM_ENV_PREFIX: CommonVars[String] = CommonVars("sqoop.params.name.env.prefix", "sqoop.env.")
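+
+  // Illustrative mapping (assumed example values): with the prefixes above, a job
+  // param such as "sqoop.args.connect" -> "jdbc:mysql://127.0.0.1:3306/db" is
+  // expected to be stripped of "sqoop.args." and passed to Sqoop as
+  //   --connect jdbc:mysql://127.0.0.1:3306/db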
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopResourceConfiguration.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopResourceConfiguration.scala
new file mode 100644
index 000000000..710d28cc2
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/context/SqoopResourceConfiguration.scala
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.context
+
+import org.apache.linkis.common.conf.CommonVars
+
+
+object SqoopResourceConfiguration {
+
+  val LINKIS_SQOOP_TASK_MAP_MEMORY: CommonVars[Int] = CommonVars[Int]("sqoop.task.map.memory", 2)
+
+  val LINKIS_SQOOP_TASK_MAP_CPU_CORES: CommonVars[Int] = CommonVars[Int]("sqoop.task.map.cpu.cores", 1)
+
+  val LINKIS_QUEUE_NAME: CommonVars[String] = CommonVars[String]("wds.linkis.rm.yarnqueue", "default")
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala
new file mode 100644
index 000000000..5c43366ba
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopExecutor.scala
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.executor
+
+import org.apache.linkis.engineconn.executor.entity.{LabelExecutor, ResourceExecutor, YarnExecutor}
+import org.apache.linkis.engineconnplugin.sqoop.client.Sqoop
+import org.apache.linkis.engineconnplugin.sqoop.context.SqoopResourceConfiguration.LINKIS_QUEUE_NAME
+import org.apache.linkis.manager.common.entity.resource.NodeResource
+import org.apache.linkis.manager.label.entity.Label
+import org.apache.linkis.engineconnplugin.sqoop.client.exception.JobExecutionException
+import org.apache.linkis.engineconnplugin.sqoop.context.SqoopEngineConnContext
+
+import java.util
+
+trait SqoopExecutor extends YarnExecutor with LabelExecutor with ResourceExecutor {
+  private var yarnMode: String = "Client"
+  private var executorLabels: util.List[Label[_]] = new util.ArrayList[Label[_]]
+  override def getApplicationId: String = Sqoop.getApplicationId
+
+  override def getApplicationURL: String = Sqoop.getApplicationURL
+
+  override def getYarnMode: String = yarnMode
+  def setYarnMode(yarnMode: String): Unit = this.yarnMode = yarnMode
+
+  override def getQueue: String = LINKIS_QUEUE_NAME.getValue
+
+  override def getExecutorLabels(): util.List[Label[_]] = executorLabels
+
+  override def setExecutorLabels(labels: util.List[Label[_]]): Unit = this.executorLabels = labels
+
+  override def requestExpectedResource(expectedResource: NodeResource): NodeResource = throw new JobExecutionException("Method requestExpectedResource is not supported.")
+
+  protected val sqoopEngineConnContext: SqoopEngineConnContext
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala
new file mode 100644
index 000000000..1d7cb6af3
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala
@@ -0,0 +1,142 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.executor
+
+import org.apache.linkis.common.utils.{JsonUtils, OverloadUtils, Utils}
+import org.apache.linkis.engineconn.once.executor.{OnceExecutorExecutionContext, OperableOnceExecutor}
+import org.apache.linkis.engineconnplugin.sqoop.client.{LinkisSqoopClient, Sqoop}
+import org.apache.linkis.engineconnplugin.sqoop.context.SqoopResourceConfiguration.{LINKIS_QUEUE_NAME, LINKIS_SQOOP_TASK_MAP_CPU_CORES, LINKIS_SQOOP_TASK_MAP_MEMORY}
+import org.apache.linkis.engineconnplugin.sqoop.context.{SqoopEngineConnContext, SqoopParamsConfiguration}
+import org.apache.linkis.manager.common.entity.resource.{CommonNodeResource, DriverAndYarnResource, LoadInstanceResource, NodeResource, YarnResource}
+import org.apache.linkis.scheduler.executer.ErrorExecuteResponse
+import java.util
+import java.util.concurrent.{Future, TimeUnit}
+
+import org.apache.linkis.engineconn.common.creation.EngineCreationContext
+import org.apache.linkis.engineconn.core.EngineConnObject
+import org.apache.linkis.manager.engineplugin.common.conf.EngineConnPluginConf
+import org.apache.linkis.protocol.engine.JobProgressInfo
+import org.apache.linkis.engineconnplugin.sqoop.client.exception.JobExecutionException
+import org.apache.linkis.engineconnplugin.sqoop.context.SqoopEnvConfiguration
+import org.apache.linkis.engineconnplugin.sqoop.params.SqoopParamsResolver
+
+
+class SqoopOnceCodeExecutor(override val id: Long,
+                            override protected val sqoopEngineConnContext: SqoopEngineConnContext) extends SqoopOnceExecutor with OperableOnceExecutor {
+
+  private var params: util.Map[String, String] = _
+  private var future: Future[_] = _
+  private var daemonThread: Future[_] = _
+  private val paramsResolvers: Array[SqoopParamsResolver] = Array()
+
+  override def doSubmit(onceExecutorExecutionContext: OnceExecutorExecutionContext, options: Map[String, String]): Unit = {
+    var isFailed = false
+    future = Utils.defaultScheduler.submit(new Runnable {
+      override def run(): Unit = {
+        // TODO filter job content
+        params = onceExecutorExecutionContext.getOnceExecutorContent.getJobContent.asInstanceOf[util.Map[String, String]]
+        info("Try to execute params." + params)
+          if(runSqoop(params, onceExecutorExecutionContext.getEngineCreationContext) != 0) {
+            isFailed = true
+            tryFailed()
+            setResponse(ErrorExecuteResponse("Run code failed!", new JobExecutionException("Exec Sqoop Code Error")))
+          }
+          info("All codes completed, now to stop SqoopEngineConn.")
+          closeDaemon()
+          if (!isFailed) {
+            trySucceed()
+          }
+          this synchronized notify()
+      }
+    })
+  }
+  protected def runSqoop(params: util.Map[String, String], context: EngineCreationContext): Int = {
+    Utils.tryCatch {
+      val finalParams = paramsResolvers.foldLeft(params) {
+        case (newParam, resolver) => resolver.resolve(newParam, context)
+      }
+      LinkisSqoopClient.run(finalParams)
+    }{
+      case e: Exception =>
+        error(s"Run Error Message: ${e.getMessage}", e)
+        -1
+    }
+
+  }
+
+  override protected def waitToRunning(): Unit = {
+    if (!isCompleted) daemonThread = Utils.defaultScheduler.scheduleAtFixedRate(new Runnable {
+      override def run(): Unit = {
+        if (!(future.isDone || future.isCancelled)) {
+          info("The Sqoop Process In Running")
+        }
+      }
+    }, SqoopEnvConfiguration.SQOOP_STATUS_FETCH_INTERVAL.getValue.toLong,
+      SqoopEnvConfiguration.SQOOP_STATUS_FETCH_INTERVAL.getValue.toLong, TimeUnit.MILLISECONDS)
+  }
+  override def getCurrentNodeResource(): NodeResource = {
+    val memorySuffix = "g"
+    val properties = EngineConnObject.getEngineCreationContext.getOptions
+    Option(properties.get(EngineConnPluginConf.JAVA_ENGINE_REQUEST_MEMORY.key)).foreach(memory => {
+      if (! memory.toLowerCase.endsWith(memorySuffix)) {
+        properties.put(EngineConnPluginConf.JAVA_ENGINE_REQUEST_MEMORY.key, memory + memorySuffix)
+      }
+    })
+    val resource = new DriverAndYarnResource(
+      new LoadInstanceResource(EngineConnPluginConf.JAVA_ENGINE_REQUEST_MEMORY.getValue(properties).toLong,
+        EngineConnPluginConf.JAVA_ENGINE_REQUEST_CORES.getValue(properties),
+        EngineConnPluginConf.JAVA_ENGINE_REQUEST_INSTANCE),
+      new YarnResource(LINKIS_SQOOP_TASK_MAP_MEMORY.getValue * getNumTasks, LINKIS_SQOOP_TASK_MAP_CPU_CORES.getValue * getNumTasks, 0, LINKIS_QUEUE_NAME.getValue)
+    )
+    val engineResource = new CommonNodeResource
+    engineResource.setUsedResource(resource)
+    engineResource
+  }
+
+  def getNumTasks: Int = {
+    if (params != null) {
+      params.getOrDefault("sqoop.args.num.mappers", "1").toInt
+    } else {
+      0
+    }
+  }
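+
+  // Worked example (editor's illustration, using the defaults above): with
+  // "sqoop.args.num.mappers" = 4, getCurrentNodeResource requests a YarnResource
+  // of 4 * sqoop.task.map.memory (= 8) and 4 * sqoop.task.map.cpu.cores (= 4),
+  // on top of the engine's own JVM resource.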
+  protected def closeDaemon(): Unit = {
+    if (daemonThread != null) daemonThread.cancel(true)
+  }
+
+  override def getProgress: Float = LinkisSqoopClient.progress()
+
+  override def getProgressInfo: Array[JobProgressInfo] = {
+    val progressInfo = LinkisSqoopClient.getProgressInfo
+    info(s"Progress Info, id: ${progressInfo.id}, total: ${progressInfo.totalTasks}, running: ${progressInfo.runningTasks}," +
+      s" succeed: ${progressInfo.succeedTasks}, fail: ${progressInfo.failedTasks}")
+    Array(progressInfo)
+  }
+
+
+  override def getMetrics: util.Map[String, Any] = {
+    val metrics = LinkisSqoopClient.getMetrics.asInstanceOf[util.Map[String, Any]]
+    // Report the resource
+    metrics.put("NodeResourceJson", getCurrentNodeResource().getUsedResource.toJson)
+    metrics
+  }
+
+  override def getDiagnosis: util.Map[String, Any] = LinkisSqoopClient.getDiagnosis.asInstanceOf[util.Map[String, Any]]
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceExecutor.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceExecutor.scala
new file mode 100644
index 000000000..727d1f640
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceExecutor.scala
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.executor
+
+import org.apache.linkis.common.utils.Utils
+import org.apache.linkis.engineconn.core.hook.ShutdownHook
+import org.apache.linkis.engineconn.once.executor.{ManageableOnceExecutor, OnceExecutorExecutionContext}
+import org.apache.linkis.manager.common.entity.enumeration.NodeStatus
+import org.apache.linkis.engineconnplugin.sqoop.client.{LinkisSqoopClient, Sqoop}
+
+import scala.collection.convert.WrapAsScala._
+
+
+trait SqoopOnceExecutor extends ManageableOnceExecutor with SqoopExecutor {
+  protected def submit(onceExecutorExecutionContext: OnceExecutorExecutionContext): Unit = {
+    val options = onceExecutorExecutionContext.getOnceExecutorContent.getJobContent.map {
+      case (k, v: String) => k -> v
+      case (k, v) if v != null => k -> v.toString
+      case (k, _) => k -> null
+    }.toMap
+    doSubmit(onceExecutorExecutionContext, options)
+  }
+  def doSubmit(onceExecutorExecutionContext: OnceExecutorExecutionContext, options: Map[String, String]): Unit
+
+  val id: Long
+
+  override def getId: String = "SqoopOnceApp_" + id
+  override def close(): Unit = {
+    Sqoop.close()
+    super.close()
+  }
+  override def trySucceed(): Boolean = {
+    super.trySucceed()
+  }
+
+
+  override def ensureAvailable[A](f: => A): A = {
+    // No need to throw an exception here
+    Utils.tryQuietly{ super.ensureAvailable(f) }
+  }
+
+  override def tryFailed(): Boolean = {
+    LinkisSqoopClient.close()
+    super.tryFailed()
+  }
+
+  override def supportCallBackLogs(): Boolean = true
+
+
+  protected def isCompleted: Boolean = isClosed || NodeStatus.isCompleted(getStatus)
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopEngineConnFactory.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopEngineConnFactory.scala
new file mode 100644
index 000000000..25666df2a
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopEngineConnFactory.scala
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.factory
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.engineconn.common.creation.EngineCreationContext
+import org.apache.linkis.engineconnplugin.sqoop.context.SqoopEngineConnContext
+import org.apache.linkis.engineconnplugin.sqoop.util.ClassUtil
+import org.apache.linkis.manager.engineplugin.common.creation.{ExecutorFactory, MultiExecutorEngineConnFactory}
+import org.apache.linkis.manager.label.entity.engine.EngineType
+import org.apache.linkis.manager.label.entity.engine.EngineType.EngineType
+
+class SqoopEngineConnFactory extends MultiExecutorEngineConnFactory with Logging {
+  override def getExecutorFactories: Array[ExecutorFactory] = executorFactoryArray
+
+  override protected def getDefaultExecutorFactoryClass: Class[_ <: ExecutorFactory] = classOf[SqoopExecutorFactory]
+
+  override protected def getEngineConnType: EngineType = EngineType.SQOOP
+
+  override protected def createEngineConnSession(engineCreationContext: EngineCreationContext): Any =
+    new SqoopEngineConnContext()
+
+
+  private val executorFactoryArray = Array[ExecutorFactory](ClassUtil.getInstance(classOf[SqoopExecutorFactory], new SqoopExecutorFactory))
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopExecutorFactory.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopExecutorFactory.scala
new file mode 100644
index 000000000..4b39fa0f0
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/factory/SqoopExecutorFactory.scala
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.factory
+
+import org.apache.linkis.engineconn.common.creation.EngineCreationContext
+import org.apache.linkis.engineconn.common.engineconn.EngineConn
+import org.apache.linkis.engineconn.once.executor.OnceExecutor
+import org.apache.linkis.engineconn.once.executor.creation.OnceExecutorFactory
+import org.apache.linkis.engineconnplugin.sqoop.context.SqoopEngineConnContext
+import org.apache.linkis.engineconnplugin.sqoop.executor.SqoopOnceCodeExecutor
+import org.apache.linkis.manager.label.entity.Label
+import org.apache.linkis.manager.label.entity.engine.RunType.{APPCONN, RunType}
+
+class SqoopExecutorFactory extends OnceExecutorFactory {
+
+  override protected def getRunType: RunType = APPCONN
+
+  override protected def newExecutor(id: Int, engineCreationContext: EngineCreationContext, engineConn: EngineConn, labels: Array[Label[_]]): OnceExecutor = {
+    engineConn.getEngineConnSession match {
+      case context: SqoopEngineConnContext =>
+        new SqoopOnceCodeExecutor(id, context)
+      case other =>
+        throw new IllegalArgumentException(s"Unexpected EngineConn session type: $other")
+    }
+  }
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/launch/SqoopEngineConnLaunchBuilder.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/launch/SqoopEngineConnLaunchBuilder.scala
new file mode 100644
index 000000000..a643b792c
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/launch/SqoopEngineConnLaunchBuilder.scala
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.launch
+
+import java.nio.file.Paths
+import java.util
+import java.util.concurrent.TimeUnit
+
+import org.apache.linkis.engineconnplugin.sqoop.context.SqoopEnvConfiguration._
+import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnBuildRequest
+import org.apache.linkis.manager.engineplugin.common.launch.process.Environment.{variable, _}
+import org.apache.linkis.manager.engineplugin.common.launch.process.JavaProcessEngineConnLaunchBuilder
+import org.apache.linkis.manager.engineplugin.common.launch.process.LaunchConstants._
+import org.apache.commons.io.IOUtils
+import org.apache.commons.lang3.StringUtils
+
+import scala.collection.JavaConverters._
+
+class SqoopEngineConnLaunchBuilder extends JavaProcessEngineConnLaunchBuilder {
+
+  override protected def getEnvironment(implicit engineConnBuildRequest: EngineConnBuildRequest): util.Map[String, String] = {
+    val environment = super.getEnvironment
+    // Basic classpath
+    addPathToClassPath(environment, variable(HADOOP_CONF_DIR))
+    addExistPathToClassPath(environment, Seq(SQOOP_CONF_DIR.getValue))
+    if (StringUtils.isNotBlank(SQOOP_HOME.getValue)) {
+      addPathToClassPath(environment, Seq(SQOOP_HOME.getValue, "/*"))
+      addPathToClassPath(environment, Seq(SQOOP_HOME.getValue, "/lib/*"))
+    }
+    // HBase classpath
+    if (StringUtils.isNotBlank(SQOOP_HBASE_HOME.getValue) && Paths.get(SQOOP_HBASE_HOME.getValue).toFile.exists()) {
+      resolveCommandToClassPath(environment, SQOOP_HBASE_HOME.getValue + "/bin/hbase classpath")
+    }
+    // HCat classpath
+    if (StringUtils.isNotBlank(SQOOP_HCAT_HOME.getValue) && Paths.get(SQOOP_HCAT_HOME.getValue).toFile.exists()) {
+      resolveCommandToClassPath(environment, SQOOP_HCAT_HOME.getValue + "/bin/hcat -classpath")
+    }
+    addExistPathToClassPath(environment, Seq(SQOOP_ZOOCFGDIR.getValue))
+    environment
+  }
+
+
+  override protected def getNecessaryEnvironment(implicit engineConnBuildRequest: EngineConnBuildRequest): Array[String] = {
+    // To submit a mapReduce job, we should load the configuration from hadoop config dir
+    Array(HADOOP_CONF_DIR.toString, SQOOP_HOME.key)
+  }
+
+  private def addExistPathToClassPath(env: util.Map[String, String], path: String): Unit = {
+    if (StringUtils.isNotBlank(path) && Paths.get(path).toFile.exists()) {
+      addPathToClassPath(env, path)
+    }
+  }
+  private def resolveCommandToClassPath(env: util.Map[String, String], command: String): Unit = {
+    trace(s"Invoke command [${command}] to get class path sequence")
+    val builder = new ProcessBuilder(Array("/bin/bash", "-c", command): _*)
+    // Set the environment
+    builder.environment.putAll(sys.env.asJava)
+    builder.redirectErrorStream(false)
+    val process = builder.start()
+    if(process.waitFor(5, TimeUnit.SECONDS) &&
+       process.waitFor() == 0) {
+       val jarPathSerial = IOUtils.toString(process.getInputStream).trim()
+       // TODO we should decide separator in different environment
+      val separatorChar = ":"
+       val jarPathList = StringUtils.split(jarPathSerial, separatorChar).filterNot(jarPath => {
+         val splitIndex = jarPath.lastIndexOf("/")
+         val jarName = if (splitIndex >= 0) jarPath.substring(splitIndex + 1) else jarPath
+         jarName.matches("^jasper-compiler-[\\s\\S]+?\\.jar$") || jarName.matches("^jsp-[\\s\\S]+?\\.jar$") || jarName.matches("^disruptor-[\\s\\S]+?\\.jar")
+       }).toList
+       addPathToClassPath(env, StringUtils.join(jarPathList.asJava, separatorChar))
+    }
+    // Release the process
+    process.destroy();
+  }
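+
+  // For illustration: "hbase classpath" / "hcat -classpath" print one ":"-joined
+  // line, e.g. "/opt/hbase/conf:/opt/hbase/lib/x.jar:..." (assumed paths); the
+  // method above filters out jasper/jsp/disruptor jars and appends the rest.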
+  private implicit def buildPath(paths: Seq[String]): String = Paths.get(paths.head, paths.tail: _*).toFile.getPath
+
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopParamsResolver.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopParamsResolver.scala
new file mode 100644
index 000000000..464fc3922
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopParamsResolver.scala
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.params
+
+import java.util
+
+import org.apache.linkis.engineconn.common.creation.EngineCreationContext
+/**
+ * Resolve the engine job params
+ */
+trait SqoopParamsResolver {
+
+  /**
+   * Resolve the input job params into the final params
+   * @param params input params
+   * @param context engine creation context
+   * @return resolved params
+   */
+  def resolve(params: util.Map[String, String], context: EngineCreationContext): util.Map[String, String]
+}
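+
+// A minimal resolver sketch (editor's illustration, not shipped in this module):
+//   class PassThroughParamsResolver extends SqoopParamsResolver {
+//     override def resolve(params: util.Map[String, String],
+//                          context: EngineCreationContext): util.Map[String, String] = params
+//   }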
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/resource/SqoopEngineConnResourceFactory.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/resource/SqoopEngineConnResourceFactory.scala
new file mode 100644
index 000000000..1634e8b7e
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/resource/SqoopEngineConnResourceFactory.scala
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.resource
+
+import org.apache.linkis.manager.common.entity.resource.{LoadInstanceResource, Resource}
+import org.apache.linkis.manager.engineplugin.common.resource.AbstractEngineResourceFactory
+
+import java.util
+
+class SqoopEngineConnResourceFactory extends AbstractEngineResourceFactory {
+  override protected def getRequestResource(properties: util.Map[String, String]): Resource = {
+    new LoadInstanceResource(1, 1, 1)
+  }
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/util/ClassUtil.scala b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/util/ClassUtil.scala
new file mode 100644
index 000000000..c3476821f
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/util/ClassUtil.scala
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconnplugin.sqoop.util
+
+import org.apache.linkis.common.utils.{ClassUtils, Utils}
+import org.apache.linkis.engineconnplugin.sqoop.client.exception.JobExecutionException
+
+import scala.collection.convert.wrapAsScala._
+
+object ClassUtil {
+
+  def getInstance[T](clazz: Class[T], defaultValue: T): T = {
+    val classes = ClassUtils.reflections.getSubTypesOf(clazz).filterNot(ClassUtils.isInterfaceOrAbstract).toArray
+    if (classes.length <= 1) defaultValue
+    else if (classes.length == 2) {
+      val realClass = if (classes(0) == defaultValue.getClass) classes(1) else classes(0)
+      Utils.tryThrow(realClass.newInstance) { t =>
+        new JobExecutionException(s"Failed to create an instance of ${clazz.getSimpleName}!", t)
+      }
+    } else {
+      throw new JobExecutionException(s"Too many subclasses of ${clazz.getSimpleName}, list: ${classes.mkString(", ")}.")
+    }
+  }
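+
+  // Usage sketch (mirrors SqoopEngineConnFactory): returns the single user-defined
+  // subclass found on the classpath, or the supplied default otherwise:
+  //   ClassUtil.getInstance(classOf[SqoopExecutorFactory], new SqoopExecutorFactory)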
+
+}
diff --git a/linkis-engineconn-plugins/pom.xml b/linkis-engineconn-plugins/pom.xml
index 5db743a80..e26f10285 100644
--- a/linkis-engineconn-plugins/pom.xml
+++ b/linkis-engineconn-plugins/pom.xml
@@ -44,6 +44,7 @@
         <module>engineconn-plugins/pipeline</module>
         <module>engineconn-plugins/jdbc</module>
         <module>engineconn-plugins/flink</module>
+        <module>engineconn-plugins/sqoop</module>
     </modules>
 
 </project>
\ No newline at end of file
diff --git a/tool/dependencies/known-dependencies.txt b/tool/dependencies/known-dependencies.txt
index eb082ad0b..e8e51236d 100644
--- a/tool/dependencies/known-dependencies.txt
+++ b/tool/dependencies/known-dependencies.txt
@@ -499,6 +499,7 @@ spring-webmvc-5.2.15.RELEASE.jar
 ssl-config-core_2.11-0.3.7.jar
 stax2-api-4.2.1.jar
 stringtemplate-3.2.1.jar
+sqoop-1.4.6-hadoop200.jar
 tephra-api-0.6.0.jar
 tephra-core-0.6.0.jar
 tephra-hbase-compat-1.0-0.6.0.jar


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org