Posted to commits@kyuubi.apache.org by ya...@apache.org on 2022/04/07 01:57:34 UTC

[incubator-kyuubi] branch master updated: [KYUUBI #2287] Revamp Flink IT by random port and merge tests

This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 19f1d4117 [KYUUBI #2287] Revamp Flink IT by random port and merge tests
19f1d4117 is described below

commit 19f1d411783112048130baeb0eb2a74771d17e3f
Author: Kent Yao <ya...@apache.org>
AuthorDate: Thu Apr 7 09:57:24 2022 +0800

    [KYUUBI #2287] Revamp Flink IT by random port and merge tests
    
    ### _Why are the changes needed?_
    
    Flink IT shall use a random available port for the Kyuubi server, and
    we shall merge the ITs to avoid creating duplicate servers and engines
    (see the port-allocation sketch after the change summary below).
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly, including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [ ] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request
    
    Closes #2287 from yaooqinn/flink.
    
    Closes #2287
    
    a0acce20 [Kent Yao] Revamp Flink IT by random port and merge tests
    ad5ae0f4 [Kent Yao] Revamp Flink IT by random port and merge tests
    
    Authored-by: Kent Yao <ya...@apache.org>
    Signed-off-by: Kent Yao <ya...@apache.org>
---
 .../kyuubi/it/flink/FlinkSQLEngineSuite.scala      | 48 ----------------------
 .../it/flink/operation/FlinkOperationSuite.scala   | 20 ++++++++-
 2 files changed, 18 insertions(+), 50 deletions(-)
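
Dropping the fixed FRONTEND_THRIFT_BINARY_BIND_PORT values in the diff below means the test server no longer pins ports 10019/10029 up front. As a rough illustration of the underlying technique (not Kyuubi's actual allocation code), the following minimal Scala sketch asks the OS for a free ephemeral port by binding a java.net.ServerSocket to port 0; the object and method names are hypothetical.

    import java.net.ServerSocket

    // Hypothetical helper, for illustration only: binding to port 0 lets the
    // OS pick any free ephemeral port, avoiding hard-coded test ports that
    // can collide when several suites start their own servers.
    object RandomPortSketch {
      def findAvailablePort(): Int = {
        val socket = new ServerSocket(0)
        try socket.getLocalPort
        finally socket.close()
      }

      def main(args: Array[String]): Unit = {
        println(s"picked a random available port: ${findAvailablePort()}")
      }
    }

With the pinned port setting removed, port selection is left to the server and test-harness defaults rather than being fixed in KyuubiConf, which is what lets the merged suite share one server and engine safely.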

diff --git a/integration-tests/kyuubi-flink-it/src/test/scala/org/apache/kyuubi/it/flink/FlinkSQLEngineSuite.scala b/integration-tests/kyuubi-flink-it/src/test/scala/org/apache/kyuubi/it/flink/FlinkSQLEngineSuite.scala
deleted file mode 100644
index 814a6d1dd..000000000
--- a/integration-tests/kyuubi-flink-it/src/test/scala/org/apache/kyuubi/it/flink/FlinkSQLEngineSuite.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi.it.flink
-
-import org.apache.kyuubi.config.KyuubiConf
-import org.apache.kyuubi.config.KyuubiConf.{ENGINE_TYPE, FRONTEND_THRIFT_BINARY_BIND_PORT}
-import org.apache.kyuubi.operation.HiveJDBCTestHelper
-
-class FlinkSQLEngineSuite extends WithKyuubiServerAndFlinkMiniCluster with HiveJDBCTestHelper {
-
-  override val conf: KyuubiConf = KyuubiConf()
-    .set(ENGINE_TYPE, "FLINK_SQL")
-    .set(FRONTEND_THRIFT_BINARY_BIND_PORT, 10029)
-    .set("flink.parallelism.default", "6")
-
-  override protected def jdbcUrl: String = getJdbcUrl
-
-  test("set kyuubi conf into flink conf") {
-    withJdbcStatement() { statement =>
-      val resultSet = statement.executeQuery("SET")
-      // Flink does not support set key without value currently,
-      // thus read all rows to find the desired one
-      var success = false
-      while (resultSet.next() && success == false) {
-        if (resultSet.getString(1) == "parallelism.default" &&
-          resultSet.getString(2) == "6") {
-          success = true
-        }
-      }
-      assert(success)
-    }
-  }
-}
diff --git a/integration-tests/kyuubi-flink-it/src/test/scala/org/apache/kyuubi/it/flink/operation/FlinkOperationSuite.scala b/integration-tests/kyuubi-flink-it/src/test/scala/org/apache/kyuubi/it/flink/operation/FlinkOperationSuite.scala
index 2ab44a4ef..583c366a2 100644
--- a/integration-tests/kyuubi-flink-it/src/test/scala/org/apache/kyuubi/it/flink/operation/FlinkOperationSuite.scala
+++ b/integration-tests/kyuubi-flink-it/src/test/scala/org/apache/kyuubi/it/flink/operation/FlinkOperationSuite.scala
@@ -18,7 +18,7 @@
 package org.apache.kyuubi.it.flink.operation
 
 import org.apache.kyuubi.config.KyuubiConf
-import org.apache.kyuubi.config.KyuubiConf.{ENGINE_TYPE, FRONTEND_THRIFT_BINARY_BIND_PORT}
+import org.apache.kyuubi.config.KyuubiConf.ENGINE_TYPE
 import org.apache.kyuubi.it.flink.WithKyuubiServerAndFlinkMiniCluster
 import org.apache.kyuubi.operation.HiveJDBCTestHelper
 import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant.TABLE_CAT
@@ -26,7 +26,7 @@ import org.apache.kyuubi.operation.meta.ResultSetSchemaConstant.TABLE_CAT
 class FlinkOperationSuite extends WithKyuubiServerAndFlinkMiniCluster with HiveJDBCTestHelper {
   override val conf: KyuubiConf = KyuubiConf()
     .set(ENGINE_TYPE, "FLINK_SQL")
-    .set(FRONTEND_THRIFT_BINARY_BIND_PORT, 10019)
+    .set("flink.parallelism.default", "6")
 
   override protected def jdbcUrl: String = getJdbcUrl
 
@@ -50,4 +50,20 @@ class FlinkOperationSuite extends WithKyuubiServerAndFlinkMiniCluster with HiveJ
       assert(resultSet.getString(1) === "tmp.hello")
     }
   }
+
+  test("set kyuubi conf into flink conf") {
+    withJdbcStatement() { statement =>
+      val resultSet = statement.executeQuery("SET")
+      // Flink does not support set key without value currently,
+      // thus read all rows to find the desired one
+      var success = false
+      while (resultSet.next() && !success) {
+        if (resultSet.getString(1) == "parallelism.default" &&
+          resultSet.getString(2) == "6") {
+          success = true
+        }
+      }
+      assert(success)
+    }
+  }
 }