You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hudi.apache.org by da...@apache.org on 2023/04/23 01:25:51 UTC

[hudi] branch revert-8528-HUDI-6119 created (now ae117f443ab)

This is an automated email from the ASF dual-hosted git repository.

danny0405 pushed a change to branch revert-8528-HUDI-6119
in repository https://gitbox.apache.org/repos/asf/hudi.git


      at ae117f443ab Revert "[HUDI-6119] Add testcase for timeline timezone UTC (#8528)"

This branch includes the following new commits:

     new ae117f443ab Revert "[HUDI-6119] Add testcase for timeline timezone UTC (#8528)"

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[hudi] 01/01: Revert "[HUDI-6119] Add testcase for timeline timezone UTC (#8528)"

Posted by da...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

danny0405 pushed a commit to branch revert-8528-HUDI-6119
in repository https://gitbox.apache.org/repos/asf/hudi.git

commit ae117f443ab10e6bd2dbd44e24bb593c36d1f880
Author: Danny Chan <yu...@gmail.com>
AuthorDate: Sun Apr 23 09:25:43 2023 +0800

    Revert "[HUDI-6119] Add testcase for timeline timezone UTC (#8528)"
    
    This reverts commit 99eabd562c5ce2a8626708b67cf5617d3c84ce77.
---
 .../org/apache/hudi/TestHoodieSparkSqlWriter.scala | 48 +---------------------
 1 file changed, 2 insertions(+), 46 deletions(-)

diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/TestHoodieSparkSqlWriter.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/TestHoodieSparkSqlWriter.scala
index 14ffb125805..2c05a883fce 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/TestHoodieSparkSqlWriter.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/TestHoodieSparkSqlWriter.scala
@@ -23,7 +23,6 @@ import org.apache.hudi.DataSourceWriteOptions._
 import org.apache.hudi.HoodieSparkUtils.gteqSpark3_0
 import org.apache.hudi.client.SparkRDDWriteClient
 import org.apache.hudi.common.model._
-import org.apache.hudi.common.table.timeline.HoodieInstantTimeGenerator
 import org.apache.hudi.common.table.{HoodieTableConfig, HoodieTableMetaClient, TableSchemaResolver}
 import org.apache.hudi.common.testutils.HoodieTestDataGenerator
 import org.apache.hudi.config.{HoodieBootstrapConfig, HoodieIndexConfig, HoodieWriteConfig}
@@ -50,10 +49,8 @@ import org.scalatest.Assertions.assertThrows
 import org.scalatest.Matchers.{be, convertToAnyShouldWrapper, intercept}
 
 import java.io.IOException
-import java.time.format.DateTimeFormatterBuilder
-import java.time.{Instant, ZoneId}
-import java.time.temporal.ChronoField
-import java.util.{Collections, Date, TimeZone, UUID}
+import java.time.Instant
+import java.util.{Collections, Date, UUID}
 import scala.collection.JavaConversions._
 import scala.collection.JavaConverters
 
@@ -1211,47 +1208,6 @@ class TestHoodieSparkSqlWriter {
     assert(exc.getMessage.contains("Consistent hashing bucket index does not work with COW table. Use simple bucket index or an MOR table."))
   }
 
-  /*
-   * Test case for instant is generated with commit timezone when TIMELINE_TIMEZONE set to UTC
-   * related to HUDI-5978
-   */
-  @Test
-  def testInsertDatasetWIthTimelineTimezoneUTC(): Unit = {
-    val fooTableModifier = commonTableModifier.updated(DataSourceWriteOptions.OPERATION.key, DataSourceWriteOptions.INSERT_OPERATION_OPT_VAL)
-      .updated(DataSourceWriteOptions.INSERT_DROP_DUPS.key, "false")
-      .updated(HoodieTableConfig.TIMELINE_TIMEZONE.key, "UTC") // utc timezone
-
-    // generate the inserts
-    val schema = DataSourceTestUtils.getStructTypeExampleSchema
-    val structType = AvroConversionUtils.convertAvroSchemaToStructType(schema)
-    val records = DataSourceTestUtils.generateRandomRows(100)
-    val recordsSeq = convertRowListToSeq(records)
-    val df = spark.createDataFrame(sc.parallelize(recordsSeq), structType)
-
-    // get UTC instant before write
-    val beforeWriteInstant = Instant.now()
-
-    // set local timezone to America/Los_Angeles(UTC-7)
-    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
-
-    // write to Hudi
-    val (success, writeInstantTimeOpt, _, _, _, hoodieTableConfig) = HoodieSparkSqlWriter.write(sqlContext, SaveMode.Append, fooTableModifier, df)
-    assertTrue(success)
-    val hoodieTableTimelineTimezone = HoodieTimelineTimeZone.valueOf(hoodieTableConfig.getString(HoodieTableConfig.TIMELINE_TIMEZONE))
-    assertEquals(hoodieTableTimelineTimezone, HoodieTimelineTimeZone.UTC)
-
-    val utcFormatter = new DateTimeFormatterBuilder()
-      .appendPattern(HoodieInstantTimeGenerator.SECS_INSTANT_TIMESTAMP_FORMAT)
-      .appendValue(ChronoField.MILLI_OF_SECOND, 3)
-      .toFormatter
-      .withZone(ZoneId.of("UTC"))
-    // instant parsed by UTC timezone
-    val writeInstant = Instant.from(utcFormatter.parse(writeInstantTimeOpt.get()))
-
-    assertTrue(beforeWriteInstant.toEpochMilli < writeInstant.toEpochMilli,
-      s"writeInstant(${writeInstant.toEpochMilli}) must always be greater than beforeWriteInstant(${beforeWriteInstant.toEpochMilli}) if writeInstant was generated with UTC timezone")
-  }
-
   private def fetchActualSchema(): Schema = {
     val tableMetaClient = HoodieTableMetaClient.builder()
       .setConf(spark.sparkContext.hadoopConfiguration)