Posted to commits@spark.apache.org by do...@apache.org on 2019/09/14 21:53:49 UTC
[spark] branch branch-2.4 updated: [SPARK-29045][SQL][TESTS] Drop table to avoid test failure in SQLMetricsSuite
This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch branch-2.4
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-2.4 by this push:
new 339b0f2 [SPARK-29045][SQL][TESTS] Drop table to avoid test failure in SQLMetricsSuite
339b0f2 is described below
commit 339b0f2a0c4043fca9cca52797936c8654910fc9
Author: LantaoJin <ji...@gmail.com>
AuthorDate: Wed Sep 11 23:05:03 2019 -0700
[SPARK-29045][SQL][TESTS] Drop table to avoid test failure in SQLMetricsSuite
### What changes were proposed in this pull request?
In method `SQLMetricsTestUtils.testMetricsDynamicPartition()`, there is a CREATE TABLE statement that is not wrapped in a `withTable` block. This causes test failures if the same table name is used in other unit tests.
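For context, `withTable` is a small try/finally helper in Spark's `SQLTestUtils`. A minimal sketch of the pattern (simplified and wrapped in a hypothetical `WithTableSketch` object so it runs standalone; not the verbatim helper):

```scala
import org.apache.spark.sql.SparkSession

object WithTableSketch {
  val spark: SparkSession = SparkSession.builder()
    .master("local[2]")
    .appName("with-table-sketch")
    .getOrCreate()

  // Run the test body, then drop the named tables in a finally block, so a
  // failing body cannot leak its table into later tests.
  def withTable(tableNames: String*)(f: => Unit): Unit = {
    try f finally {
      tableNames.foreach(name => spark.sql(s"DROP TABLE IF EXISTS $name"))
    }
  }

  def main(args: Array[String]): Unit = {
    withTable("tbl") {
      spark.sql("CREATE TABLE tbl(a INT, b INT) USING parquet")
      // ... exercise the table; it is dropped on exit either way.
    }
    spark.stop()
  }
}
```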
### Why are the changes needed?
To avoid "table already exists" failures in tests.
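For illustration, pasted into `spark-shell` (hypothetical table name `tbl`; the exact message varies by Spark version), the failure mode looks like:

```scala
// Without cleanup, a later test that reuses the name fails at analysis time.
spark.sql("CREATE TABLE tbl(a INT) USING parquet") // first test: succeeds
spark.sql("CREATE TABLE tbl(a INT) USING parquet") // later test: throws
// org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
```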
### Does this PR introduce any user-facing change?
No
### How was this patch tested?
Existing UTs
Closes #25752 from LantaoJin/SPARK-29045.
Authored-by: LantaoJin <ji...@gmail.com>
Signed-off-by: Yuming Wang <wg...@gmail.com>
---
.../sql/execution/metric/SQLMetricsTestUtils.scala | 46 +++++++++++-----------
1 file changed, 24 insertions(+), 22 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsTestUtils.scala
index dcc540f..48ac52a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsTestUtils.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsTestUtils.scala
@@ -100,29 +100,31 @@ trait SQLMetricsTestUtils extends SQLTestUtils {
       provider: String,
       dataFormat: String,
       tableName: String): Unit = {
-    withTempPath { dir =>
-      spark.sql(
-        s"""
-           |CREATE TABLE $tableName(a int, b int)
-           |USING $provider
-           |PARTITIONED BY(a)
-           |LOCATION '${dir.toURI}'
-         """.stripMargin)
-      val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier(tableName))
-      assert(table.location == makeQualifiedPath(dir.getAbsolutePath))
-
-      val df = spark.range(start = 0, end = 40, step = 1, numPartitions = 1)
-        .selectExpr("id a", "id b")
-
-      // 40 files, 80 rows, 40 dynamic partitions.
-      verifyWriteDataMetrics(Seq(40, 40, 80)) {
-        df.union(df).repartition(2, $"a")
-          .write
-          .format(dataFormat)
-          .mode("overwrite")
-          .insertInto(tableName)
+    withTable(tableName) {
+      withTempPath { dir =>
+        spark.sql(
+          s"""
+             |CREATE TABLE $tableName(a int, b int)
+             |USING $provider
+             |PARTITIONED BY(a)
+             |LOCATION '${dir.toURI}'
+           """.stripMargin)
+        val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier(tableName))
+        assert(table.location == makeQualifiedPath(dir.getAbsolutePath))
+
+        val df = spark.range(start = 0, end = 40, step = 1, numPartitions = 1)
+          .selectExpr("id a", "id b")
+
+        // 40 files, 80 rows, 40 dynamic partitions.
+        verifyWriteDataMetrics(Seq(40, 40, 80)) {
+          df.union(df).repartition(2, $"a")
+            .write
+            .format(dataFormat)
+            .mode("overwrite")
+            .insertInto(tableName)
+        }
+        assert(TestUtils.recursiveList(dir).count(_.getName.startsWith("part-")) == 40)
       }
-      assert(TestUtils.recursiveList(dir).count(_.getName.startsWith("part-")) == 40)
     }
   }
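As an aside, the expected metrics in the unchanged test body follow directly from the data: `spark.range(0, 40)` yields 40 distinct values of the partition column `a`, hence 40 dynamic partitions; the self-union doubles the row count to 80; and because `repartition(2, $"a")` routes all rows with a given `a` to a single task, each distinct value is written as exactly one `part-` file, giving 40 files. A standalone sketch of the same write (hypothetical table name `metrics_demo`, parquet provider, local session; not part of the commit):

```scala
import org.apache.spark.sql.SparkSession

object DynamicPartitionWriteSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("dynamic-partition-write-sketch")
      .getOrCreate()
    import spark.implicits._

    spark.sql("CREATE TABLE metrics_demo(a INT, b INT) USING parquet PARTITIONED BY (a)")

    // ids 0..39 -> 40 distinct values of the partition column `a`.
    val df = spark.range(start = 0, end = 40, step = 1, numPartitions = 1)
      .selectExpr("id a", "id b")

    // The union doubles the rows to 80; all rows sharing an `a` value land
    // in one of the 2 shuffle tasks, so each of the 40 partition values is
    // written as exactly one part- file.
    df.union(df).repartition(2, $"a")
      .write
      .mode("overwrite")
      .insertInto("metrics_demo")

    spark.stop()
  }
}
```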
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org