Posted to commits@kyuubi.apache.org by ch...@apache.org on 2021/10/12 06:47:10 UTC

[incubator-kyuubi] branch master updated: [KYUUBI #1152] [SUB-TASK][TEST] Replace hive-jdbc by kyuubi-hive-jdbc in UT

This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 1f8fa86  [KYUUBI #1152] [SUB-TASK][TEST] Replace hive-jdbc by kyuubi-hive-jdbc in UT
1f8fa86 is described below

commit 1f8fa866440a0b765bdec2ec1d255b948392b925
Author: Cheng Pan <ch...@apache.org>
AuthorDate: Tue Oct 12 14:47:01 2021 +0800

    [KYUUBI #1152] [SUB-TASK][TEST] Replace hive-jdbc by kyuubi-hive-jdbc in UT
    
    ### _Why are the changes needed?_
    Sub-task of #1131
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run tests](https://kyuubi.readthedocs.io/en/latest/develop_tools/testing.html#running-tests) locally before making a pull request
    
    Closes #1152 from pan3793/ut-kyuubi-hive-jdbc.
    
    Closes #1152
    
    2f508356 [Cheng Pan] Avoid hardcode Hive Jdbc class
    5032d23d [Cheng Pan] [KYUUBI #1131][TEST] Replace hive-jdbc by kyuubi-hive-jdbc in UT
    
    Authored-by: Cheng Pan <ch...@apache.org>
    Signed-off-by: Cheng Pan <ch...@apache.org>
---
 .github/workflows/master.yml                       | 15 ++--
 externals/kyuubi-spark-sql-engine/pom.xml          | 34 ---------
 .../spark/operation/SparkOperationSuite.scala      | 12 ++--
 kubernetes/integration-tests/pom.xml               |  1 +
 .../apache/kyuubi/operation/JDBCTestUtils.scala    |  6 ++
 kyuubi-server/pom.xml                              | 18 ++---
 pom.xml                                            | 82 +---------------------
 7 files changed, 31 insertions(+), 137 deletions(-)
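
For context, the net effect of the swap is that the test suites now connect through the Kyuubi Hive JDBC driver instead of Apache Hive's. A minimal sketch of what that looks like from a test's point of view (the endpoint, credentials, and object name below are hypothetical, not taken from this patch):

    import java.sql.DriverManager

    object DriverSwapSketch {
      def main(args: Array[String]): Unit = {
        // Before: org.apache.hive.jdbc.HiveDriver         (org.apache.hive:hive-jdbc)
        // After:  org.apache.kyuubi.jdbc.KyuubiHiveDriver (org.apache.kyuubi:kyuubi-hive-jdbc)
        Class.forName("org.apache.kyuubi.jdbc.KyuubiHiveDriver")

        // Hypothetical endpoint; the real suites connect to the server/engine they start.
        val conn = DriverManager.getConnection(
          "jdbc:hive2://localhost:10009/default", "anonymous", "")
        try {
          val md = conn.getMetaData
          // With the Kyuubi driver these report Kyuubi's name and version
          // (see the SparkOperationSuite assertions below) rather than Hive's.
          println(md.getDriverName)    // "Kyuubi Project Hive JDBC Client"
          println(md.getDriverVersion) // the Kyuubi version
        } finally conn.close()
      }
    }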

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 21d2e93..8dac23f 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -138,9 +138,12 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-maven-io-
       - name: Run TPC-DS Tests
-        run: |
-          ./build/mvn clean install -Pspark-3.1 -DskipTests -pl :kyuubi-spark-sql-engine_2.12,:kyuubi-common_2.12,:kyuubi-ha_2.12,:kyuubi-zookeeper_2.12,:kyuubi-spark-monitor_2.12
-          ./build/mvn test -Pspark-3.1 -Dtest=none -DwildcardSuites=org.apache.kyuubi.operation.tpcds -Dmaven.plugin.scalatest.exclude.tags=''
+        run: >-
+          ./build/mvn clean install -Dmaven.javadoc.skip=true -V
+          -pl kyuubi-server -am
+          -Pspark-3.1
+          -Dmaven.plugin.scalatest.exclude.tags=''
+          -Dtest=none -DwildcardSuites=org.apache.kyuubi.operation.tpcds
 
   minikube-it:
     name: Minikube Integration Test
@@ -176,7 +179,11 @@ jobs:
       - name: kyuubi pod check
         run: kubectl get pods
       - name: integration tests
-        run: ./build/mvn clean test -pl :kyuubi-common_2.12,:kubernetes-integration-tests_2.12 -Pkubernetes -Dtest=none -DwildcardSuites=org.apache.kyuubi.kubernetes.test
+        run: >-
+          ./build/mvn clean install -Dmaven.javadoc.skip=true -V
+          -pl kubernetes/integration-tests -am
+          -Pkubernetes
+          -Dtest=none -DwildcardSuites=org.apache.kyuubi.kubernetes.test
       - name: Upload test logs
         if: failure()
         uses: actions/upload-artifact@v2
diff --git a/externals/kyuubi-spark-sql-engine/pom.xml b/externals/kyuubi-spark-sql-engine/pom.xml
index 74486e5..64d9a83 100644
--- a/externals/kyuubi-spark-sql-engine/pom.xml
+++ b/externals/kyuubi-spark-sql-engine/pom.xml
@@ -106,40 +106,6 @@
         </dependency>
 
         <dependency>
-            <groupId>org.apache.hive</groupId>
-            <artifactId>hive-common</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hive</groupId>
-            <artifactId>hive-jdbc</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hive</groupId>
-            <artifactId>hive-serde</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hive</groupId>
-            <artifactId>hive-service</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hive</groupId>
-            <artifactId>hive-llap-client</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hive</groupId>
-            <artifactId>hive-llap-common</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
             <groupId>org.apache.kyuubi</groupId>
             <artifactId>kyuubi-hive-jdbc</artifactId>
             <version>${project.version}</version>
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
index 0e9e03d..a31e4f6 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.kyuubi.engine.spark.operation
 
-import java.sql.{DatabaseMetaData, ResultSet, SQLFeatureNotSupportedException}
+import java.sql.{DatabaseMetaData, ResultSet, SQLException, SQLFeatureNotSupportedException}
 
 import scala.collection.JavaConverters._
 import scala.util.Random
@@ -27,8 +27,6 @@ import org.apache.hadoop.hive.thrift.{DelegationTokenIdentifier => HiveTokenIden
 import org.apache.hadoop.io.Text
 import org.apache.hadoop.security.{Credentials, UserGroupInformation}
 import org.apache.hadoop.security.token.{Token, TokenIdentifier}
-import org.apache.hive.common.util.HiveVersionInfo
-import org.apache.hive.service.cli.HiveSQLException
 import org.apache.hive.service.rpc.thrift._
 import org.apache.spark.kyuubi.SparkContextHelper
 import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
@@ -406,8 +404,8 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveJDBCTests {
       assert(metaData.allTablesAreSelectable)
       assert(metaData.getDatabaseProductName === "Spark SQL")
       assert(metaData.getDatabaseProductVersion === KYUUBI_VERSION)
-      assert(metaData.getDriverName === "Hive JDBC")
-      assert(metaData.getDriverVersion === HiveVersionInfo.getVersion)
+      assert(metaData.getDriverName === "Kyuubi Project Hive JDBC Client")
+      assert(metaData.getDriverVersion === KYUUBI_VERSION)
       assert(metaData.getDatabaseMajorVersion === Utils.majorVersion(KYUUBI_VERSION))
       assert(metaData.getDatabaseMinorVersion === Utils.minorVersion(KYUUBI_VERSION))
       assert(metaData.getIdentifierQuoteString === " ",
@@ -454,9 +452,9 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveJDBCTests {
       assert(metaData.getDefaultTransactionIsolation === java.sql.Connection.TRANSACTION_NONE)
       assert(!metaData.supportsTransactions)
       assert(!metaData.getProcedureColumns("", "%", "%", "%").next())
-      intercept[HiveSQLException](metaData.getPrimaryKeys("", "default", ""))
+      intercept[SQLException](metaData.getPrimaryKeys("", "default", ""))
       assert(!metaData.getImportedKeys("", "default", "").next())
-      intercept[HiveSQLException] {
+      intercept[SQLException] {
         metaData.getCrossReference("", "default", "src", "", "default", "src2")
       }
       assert(!metaData.getIndexInfo("", "default", "src", true, true).next())
diff --git a/kubernetes/integration-tests/pom.xml b/kubernetes/integration-tests/pom.xml
index 1059ef4..10a7f52 100644
--- a/kubernetes/integration-tests/pom.xml
+++ b/kubernetes/integration-tests/pom.xml
@@ -69,6 +69,7 @@
             <groupId>org.apache.kyuubi</groupId>
             <artifactId>kyuubi-hive-jdbc</artifactId>
             <version>${project.version}</version>
+            <scope>test</scope>
         </dependency>
     </dependencies>
 
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/JDBCTestUtils.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/JDBCTestUtils.scala
index 4df5930..6ddf327 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/JDBCTestUtils.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/JDBCTestUtils.scala
@@ -32,6 +32,12 @@ import org.apache.kyuubi.service.authentication.PlainSASLHelper
 
 trait JDBCTestUtils extends KyuubiFunSuite {
 
+  // Load the KyuubiHiveDriver class before using it, otherwise the first call to
+  // `DriverManager.getConnection("jdbc:hive2://...")` fails.
+  // The root cause is unknown; Apache Spark does the same thing.
+  def hiveJdbcDriverClass: String = "org.apache.kyuubi.jdbc.KyuubiHiveDriver"
+  Class.forName(hiveJdbcDriverClass)
+
   protected val dftSchema = "default"
   protected lazy val user: String = Utils.currentUser
   protected val patterns = Seq("", "*", "%", null, ".*", "_*", "_%", ".%")
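
The eager `Class.forName` added above amounts to explicit JDBC driver registration: loading the driver class runs its static initializer, which registers it with `java.sql.DriverManager`, and DriverManager can only serve a URL once some registered driver accepts it. The commit note says the root cause was not pinned down, so take this as a plausible reading rather than a confirmed diagnosis. A minimal sketch of the pattern, with a hypothetical endpoint:

    import java.sql.DriverManager

    object DriverPreloadSketch {
      def main(args: Array[String]): Unit = {
        // Loading the driver class triggers its static registration block,
        // the classic pre-JDBC-4 registration pattern.
        Class.forName("org.apache.kyuubi.jdbc.KyuubiHiveDriver")

        // Hypothetical URL; JDBCTestUtils derives the real one from the engine it starts.
        // Without a registered driver accepting jdbc:hive2, this call would fail
        // with "No suitable driver".
        val conn = DriverManager.getConnection(
          "jdbc:hive2://localhost:10009/default", "anonymous", "")
        try assert(conn.createStatement().executeQuery("SELECT 1").next())
        finally conn.close()
      }
    }
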
diff --git a/kyuubi-server/pom.xml b/kyuubi-server/pom.xml
index 1503d39..8c12c2e 100644
--- a/kyuubi-server/pom.xml
+++ b/kyuubi-server/pom.xml
@@ -172,6 +172,13 @@
 
         <dependency>
             <groupId>org.apache.kyuubi</groupId>
+            <artifactId>kyuubi-hive-jdbc</artifactId>
+            <version>${project.version}</version>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.kyuubi</groupId>
             <artifactId>kyuubi-spark-sql-engine_${scala.binary.version}</artifactId>
             <version>${project.version}</version>
             <scope>test</scope>
@@ -191,17 +198,6 @@
 
         <dependency>
             <groupId>org.apache.hive</groupId>
-            <artifactId>hive-jdbc</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hive</groupId>
-            <artifactId>hive-service</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hive</groupId>
             <artifactId>hive-metastore</artifactId>
             <type>test-jar</type>
             <scope>test</scope>
diff --git a/pom.xml b/pom.xml
index cbf9ff1..1c716c4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -100,7 +100,6 @@
         <guava.version>30.1-jre</guava.version>
         <hadoop.version>3.2.2</hadoop.version>
         <hadoop.binary.version>3.2</hadoop.binary.version>
-        <hbase.version>1.2.3</hbase.version>
         <hive.version>2.3.7</hive.version>
         <hudi.version>0.9.0</hudi.version>
         <iceberg.name>iceberg-spark3-runtime</iceberg.name>
@@ -520,74 +519,6 @@
 
             <dependency>
                 <groupId>org.apache.hive</groupId>
-                <artifactId>hive-jdbc</artifactId>
-                <version>${hive.version}</version>
-                <exclusions>
-                    <exclusion>
-                        <groupId>org.apache.hive</groupId>
-                        <artifactId>hive-common</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hive</groupId>
-                        <artifactId>hive-metastore</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hive</groupId>
-                        <artifactId>hive-serde</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hive</groupId>
-                        <artifactId>hive-service</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hive</groupId>
-                        <artifactId>hive-service-rpc</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hive</groupId>
-                        <artifactId>hive-shims</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.curator</groupId>
-                        <artifactId>curator-framework</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.thrift</groupId>
-                        <artifactId>libthrift</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.thrift</groupId>
-                        <artifactId>libfb303</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.zookeeper</groupId>
-                        <artifactId>zookeeper</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.slf4j</groupId>
-                        <artifactId>slf4j-api</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.slf4j</groupId>
-                        <artifactId>slf4j-log4j12</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>log4j</groupId>
-                        <artifactId>log4j</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>commons-logging</groupId>
-                        <artifactId>commons-logging</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.codehaus.groovy</groupId>
-                        <artifactId>groovy-all</artifactId>
-                    </exclusion>
-                </exclusions>
-            </dependency>
-
-            <dependency>
-                <groupId>org.apache.hive</groupId>
                 <artifactId>hive-serde</artifactId>
                 <version>${hive.version}</version>
                 <exclusions>
@@ -670,18 +601,6 @@
                 </exclusions>
             </dependency>
 
-            <dependency>
-                <groupId>org.apache.hive</groupId>
-                <artifactId>hive-service</artifactId>
-                <version>${hive.version}</version>
-                <exclusions>
-                    <exclusion>
-                        <groupId>*</groupId>
-                        <artifactId>*</artifactId>
-                    </exclusion>
-                </exclusions>
-            </dependency>
-
             <!--
               because of THRIFT-4805, we don't upgrade to libthrift:0.12.0,
               because of THRIFT-5274, we don't upgrade to libthrift:0.13.0,
@@ -1383,6 +1302,7 @@
                                 <pattern>${maven.build.timestamp.format}</pattern>
                                 <timeSource>current</timeSource>
                                 <timeZone>Asia/Shanghai</timeZone>
+                                <locale>en_US</locale>
                             </configuration>
                         </execution>
                         <execution>