Posted to commits@hudi.apache.org by vi...@apache.org on 2019/09/21 16:05:08 UTC

[incubator-hudi] branch master updated: HUDI-267 Refactor bad method name HoodieTestUtils#initTableType and HoodieTableMetaClient#initializePathAsHoodieDataset (#916)

This is an automated email from the ASF dual-hosted git repository.

vinoth pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new f020d02  HUDI-267 Refactor bad method name HoodieTestUtils#initTableType and HoodieTableMetaClient#initializePathAsHoodieDataset (#916)
f020d02 is described below

commit f020d029c406008b7342f1117b95b68b4a90d401
Author: vinoyang <ya...@gmail.com>
AuthorDate: Sun Sep 22 00:05:02 2019 +0800

    HUDI-267 Refactor bad method name HoodieTestUtils#initTableType and HoodieTableMetaClient#initializePathAsHoodieDataset (#916)
---
 .../src/test/java/org/apache/hudi/HoodieClientTestHarness.java |  2 +-
 hudi-client/src/test/java/org/apache/hudi/TestCleaner.java     |  4 ++--
 .../test/java/org/apache/hudi/TestCompactionAdminClient.java   |  2 +-
 .../src/test/java/org/apache/hudi/io/TestHoodieCompactor.java  |  4 ++--
 .../test/java/org/apache/hudi/table/TestMergeOnReadTable.java  |  6 +++---
 .../org/apache/hudi/common/table/HoodieTableMetaClient.java    |  6 +++---
 .../java/org/apache/hudi/common/model/HoodieTestUtils.java     | 10 +++++-----
 .../org/apache/hudi/common/table/log/HoodieLogFormatTest.java  |  2 +-
 .../java/org/apache/hudi/common/util/TestCompactionUtils.java  |  2 +-
 .../hudi/hadoop/realtime/HoodieRealtimeRecordReaderTest.java   |  8 ++++----
 .../java/org/apache/hudi/utilities/HDFSParquetImporter.java    |  2 +-
 11 files changed, 24 insertions(+), 24 deletions(-)
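
For reference, a minimal sketch of the renamed test helper in use (not part of this
commit; the class name, main method, and base path below are illustrative, and it
assumes the package locations shown in the diff paths plus a test-scope classpath,
since HoodieTestUtils lives under src/test):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;

    import org.apache.hudi.common.model.HoodieTableType;
    import org.apache.hudi.common.model.HoodieTestUtils;
    import org.apache.hudi.common.table.HoodieTableMetaClient;

    public class InitRenameSketch {
      public static void main(String[] args) throws IOException {
        // Hypothetical base path, for illustration only.
        String basePath = "/tmp/hoodie/sample-dataset";
        Configuration hadoopConf = HoodieTestUtils.getDefaultHadoopConf();

        // Before this commit the call below was HoodieTestUtils.initTableType(...).
        // The rename makes it a third init(...) overload alongside
        // init(String, HoodieTableType) and init(Configuration, String).
        HoodieTableMetaClient metaClient =
            HoodieTestUtils.init(hadoopConf, basePath, HoodieTableType.MERGE_ON_READ);
      }
    }

The callers updated in the diff below are purely mechanical substitutions of the
old name for the new overload.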

diff --git a/hudi-client/src/test/java/org/apache/hudi/HoodieClientTestHarness.java b/hudi-client/src/test/java/org/apache/hudi/HoodieClientTestHarness.java
index bb89964..84a9e21 100644
--- a/hudi-client/src/test/java/org/apache/hudi/HoodieClientTestHarness.java
+++ b/hudi-client/src/test/java/org/apache/hudi/HoodieClientTestHarness.java
@@ -200,7 +200,7 @@ public abstract class HoodieClientTestHarness implements Serializable {
       throw new IllegalStateException("The Spark context has not been initialized.");
     }
 
-    HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath, getTableType());
+    HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath, getTableType());
   }
 
   /**
diff --git a/hudi-client/src/test/java/org/apache/hudi/TestCleaner.java b/hudi-client/src/test/java/org/apache/hudi/TestCleaner.java
index b256e86..1e8df4a 100644
--- a/hudi-client/src/test/java/org/apache/hudi/TestCleaner.java
+++ b/hudi-client/src/test/java/org/apache/hudi/TestCleaner.java
@@ -515,7 +515,7 @@ public class TestCleaner extends TestHoodieClientBase {
             HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS).retainFileVersions(1).build())
         .build();
 
-    HoodieTableMetaClient metaClient = HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath,
+    HoodieTableMetaClient metaClient = HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath,
         HoodieTableType.MERGE_ON_READ);
 
     // Make 3 files, one base file and 2 log files associated with base file
@@ -858,7 +858,7 @@ public class TestCleaner extends TestHoodieClientBase {
    */
   public void testPendingCompactions(HoodieWriteConfig config, int expNumFilesDeleted,
       int expNumFilesUnderCompactionDeleted) throws IOException {
-    HoodieTableMetaClient metaClient = HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath,
+    HoodieTableMetaClient metaClient = HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath,
         HoodieTableType.MERGE_ON_READ);
     String[] instants = new String[]{"000", "001", "003", "005", "007", "009", "011", "013"};
     String[] compactionInstants = new String[]{"002", "004", "006", "008", "010"};
diff --git a/hudi-client/src/test/java/org/apache/hudi/TestCompactionAdminClient.java b/hudi-client/src/test/java/org/apache/hudi/TestCompactionAdminClient.java
index 312ee08..3b6652a 100644
--- a/hudi-client/src/test/java/org/apache/hudi/TestCompactionAdminClient.java
+++ b/hudi-client/src/test/java/org/apache/hudi/TestCompactionAdminClient.java
@@ -53,7 +53,7 @@ public class TestCompactionAdminClient extends TestHoodieClientBase {
   public void setUp() throws Exception {
     initTempFolderAndPath();
     initSparkContexts();
-    metaClient = HoodieTestUtils.initTableType(HoodieTestUtils.getDefaultHadoopConf(), basePath, MERGE_ON_READ);
+    metaClient = HoodieTestUtils.init(HoodieTestUtils.getDefaultHadoopConf(), basePath, MERGE_ON_READ);
     client = new CompactionAdminClient(jsc, basePath);
   }
 
diff --git a/hudi-client/src/test/java/org/apache/hudi/io/TestHoodieCompactor.java b/hudi-client/src/test/java/org/apache/hudi/io/TestHoodieCompactor.java
index ac1fbad..8d9a676 100644
--- a/hudi-client/src/test/java/org/apache/hudi/io/TestHoodieCompactor.java
+++ b/hudi-client/src/test/java/org/apache/hudi/io/TestHoodieCompactor.java
@@ -62,7 +62,7 @@ public class TestHoodieCompactor extends HoodieClientTestHarness {
     initTempFolderAndPath();
     hadoopConf = HoodieTestUtils.getDefaultHadoopConf();
     fs = FSUtils.getFs(basePath, hadoopConf);
-    HoodieTestUtils.initTableType(hadoopConf, basePath, HoodieTableType.MERGE_ON_READ);
+    HoodieTestUtils.init(hadoopConf, basePath, HoodieTableType.MERGE_ON_READ);
     initTestDataGenerator();
   }
 
@@ -96,7 +96,7 @@ public class TestHoodieCompactor extends HoodieClientTestHarness {
 
   @Test(expected = HoodieNotSupportedException.class)
   public void testCompactionOnCopyOnWriteFail() throws Exception {
-    HoodieTestUtils.initTableType(hadoopConf, basePath, HoodieTableType.COPY_ON_WRITE);
+    HoodieTestUtils.init(hadoopConf, basePath, HoodieTableType.COPY_ON_WRITE);
     HoodieTableMetaClient metaClient = new HoodieTableMetaClient(jsc.hadoopConfiguration(), basePath);
 
     HoodieTable table = HoodieTable.getHoodieTable(metaClient, getConfig(), jsc);
diff --git a/hudi-client/src/test/java/org/apache/hudi/table/TestMergeOnReadTable.java b/hudi-client/src/test/java/org/apache/hudi/table/TestMergeOnReadTable.java
index a4789e9..833b39a 100644
--- a/hudi-client/src/test/java/org/apache/hudi/table/TestMergeOnReadTable.java
+++ b/hudi-client/src/test/java/org/apache/hudi/table/TestMergeOnReadTable.java
@@ -85,7 +85,7 @@ public class TestMergeOnReadTable extends HoodieClientTestHarness {
     jsc.hadoopConfiguration().addResource(dfs.getConf());
     initTempFolderAndPath();
     dfs.mkdirs(new Path(basePath));
-    HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath, HoodieTableType.MERGE_ON_READ);
+    HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath, HoodieTableType.MERGE_ON_READ);
     initTestDataGenerator();
   }
 
@@ -294,7 +294,7 @@ public class TestMergeOnReadTable extends HoodieClientTestHarness {
   public void testCOWToMORConvertedDatasetRollback() throws Exception {
 
     //Set TableType to COW
-    HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath, HoodieTableType.COPY_ON_WRITE);
+    HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath, HoodieTableType.COPY_ON_WRITE);
 
     HoodieWriteConfig cfg = getConfig(true);
     try (HoodieWriteClient client = getWriteClient(cfg);) {
@@ -330,7 +330,7 @@ public class TestMergeOnReadTable extends HoodieClientTestHarness {
       assertNoWriteErrors(statuses);
 
       //Set TableType to MOR
-      HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath, HoodieTableType.MERGE_ON_READ);
+      HoodieTestUtils.init(jsc.hadoopConfiguration(), basePath, HoodieTableType.MERGE_ON_READ);
 
       //rollback a COW commit when TableType is MOR
       client.rollback(newCommitTime);
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java b/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
index 3d441a0..479db69 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
@@ -273,7 +273,7 @@ public class HoodieTableMetaClient implements Serializable {
     properties.put(HoodieTableConfig.HOODIE_TABLE_NAME_PROP_NAME, tableName);
     properties.put(HoodieTableConfig.HOODIE_TABLE_TYPE_PROP_NAME, type.name());
     properties.put(HoodieTableConfig.HOODIE_ARCHIVELOG_FOLDER_PROP_NAME, archiveLogFolder);
-    return HoodieTableMetaClient.initializePathAsHoodieDataset(hadoopConf, basePath, properties);
+    return HoodieTableMetaClient.initDatasetAndGetMetaClient(hadoopConf, basePath, properties);
   }
 
   /**
@@ -287,7 +287,7 @@ public class HoodieTableMetaClient implements Serializable {
     if (tableType == HoodieTableType.MERGE_ON_READ) {
       properties.setProperty(HoodieTableConfig.HOODIE_PAYLOAD_CLASS_PROP_NAME, payloadClassName);
     }
-    return HoodieTableMetaClient.initializePathAsHoodieDataset(hadoopConf, basePath, properties);
+    return HoodieTableMetaClient.initDatasetAndGetMetaClient(hadoopConf, basePath, properties);
   }
 
   /**
@@ -296,7 +296,7 @@ public class HoodieTableMetaClient implements Serializable {
    *
    * @return Instance of HoodieTableMetaClient
    */
-  public static HoodieTableMetaClient initializePathAsHoodieDataset(Configuration hadoopConf,
+  public static HoodieTableMetaClient initDatasetAndGetMetaClient(Configuration hadoopConf,
       String basePath, Properties props) throws IOException {
     log.info("Initializing " + basePath + " as hoodie dataset " + basePath);
     Path basePathDir = new Path(basePath);
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/model/HoodieTestUtils.java b/hudi-common/src/test/java/org/apache/hudi/common/model/HoodieTestUtils.java
index 056c069..64faf3b 100644
--- a/hudi-common/src/test/java/org/apache/hudi/common/model/HoodieTestUtils.java
+++ b/hudi-common/src/test/java/org/apache/hudi/common/model/HoodieTestUtils.java
@@ -96,21 +96,21 @@ public class HoodieTestUtils {
   }
 
   public static HoodieTableMetaClient init(String basePath, HoodieTableType tableType) throws IOException {
-    return initTableType(getDefaultHadoopConf(), basePath, tableType);
+    return init(getDefaultHadoopConf(), basePath, tableType);
   }
 
   public static HoodieTableMetaClient init(Configuration hadoopConf, String basePath)
       throws IOException {
-    return initTableType(hadoopConf, basePath, HoodieTableType.COPY_ON_WRITE);
+    return init(hadoopConf, basePath, HoodieTableType.COPY_ON_WRITE);
   }
 
-  public static HoodieTableMetaClient initTableType(Configuration hadoopConf, String basePath,
-      HoodieTableType tableType) throws IOException {
+  public static HoodieTableMetaClient init(Configuration hadoopConf, String basePath, HoodieTableType tableType)
+      throws IOException {
     Properties properties = new Properties();
     properties.setProperty(HoodieTableConfig.HOODIE_TABLE_NAME_PROP_NAME, RAW_TRIPS_TEST_NAME);
     properties.setProperty(HoodieTableConfig.HOODIE_TABLE_TYPE_PROP_NAME, tableType.name());
     properties.setProperty(HoodieTableConfig.HOODIE_PAYLOAD_CLASS_PROP_NAME, HoodieAvroPayload.class.getName());
-    return HoodieTableMetaClient.initializePathAsHoodieDataset(hadoopConf, basePath, properties);
+    return HoodieTableMetaClient.initDatasetAndGetMetaClient(hadoopConf, basePath, properties);
   }
 
   public static String makeNewCommitTime() {
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/table/log/HoodieLogFormatTest.java b/hudi-common/src/test/java/org/apache/hudi/common/table/log/HoodieLogFormatTest.java
index 4df738f..8b6b597 100644
--- a/hudi-common/src/test/java/org/apache/hudi/common/table/log/HoodieLogFormatTest.java
+++ b/hudi-common/src/test/java/org/apache/hudi/common/table/log/HoodieLogFormatTest.java
@@ -113,7 +113,7 @@ public class HoodieLogFormatTest {
     assertTrue(fs.mkdirs(new Path(folder.getRoot().getPath())));
     this.partitionPath = new Path(folder.getRoot().getPath());
     this.basePath = folder.getRoot().getParent();
-    HoodieTestUtils.initTableType(MiniClusterUtil.configuration, basePath, HoodieTableType.MERGE_ON_READ);
+    HoodieTestUtils.init(MiniClusterUtil.configuration, basePath, HoodieTableType.MERGE_ON_READ);
   }
 
   @After
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/util/TestCompactionUtils.java b/hudi-common/src/test/java/org/apache/hudi/common/util/TestCompactionUtils.java
index a5a139d..11e0e7b 100644
--- a/hudi-common/src/test/java/org/apache/hudi/common/util/TestCompactionUtils.java
+++ b/hudi-common/src/test/java/org/apache/hudi/common/util/TestCompactionUtils.java
@@ -65,7 +65,7 @@ public class TestCompactionUtils {
 
   @Before
   public void init() throws IOException {
-    metaClient = HoodieTestUtils.initTableType(getDefaultHadoopConf(),
+    metaClient = HoodieTestUtils.init(getDefaultHadoopConf(),
         tmpFolder.getRoot().getAbsolutePath(), HoodieTableType.MERGE_ON_READ);
     basePath = metaClient.getBasePath();
   }
diff --git a/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeRecordReaderTest.java b/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeRecordReaderTest.java
index a583a34..4fad4ae 100644
--- a/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeRecordReaderTest.java
+++ b/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/realtime/HoodieRealtimeRecordReaderTest.java
@@ -172,7 +172,7 @@ public class HoodieRealtimeRecordReaderTest {
   public void testReader(boolean partitioned) throws Exception {
     // initial commit
     Schema schema = HoodieAvroUtils.addMetadataFields(SchemaTestUtil.getEvolvedSchema());
-    HoodieTestUtils.initTableType(hadoopConf, basePath.getRoot().getAbsolutePath(),
+    HoodieTestUtils.init(hadoopConf, basePath.getRoot().getAbsolutePath(),
         HoodieTableType.MERGE_ON_READ);
     String baseInstant = "100";
     File partitionDir =
@@ -263,7 +263,7 @@ public class HoodieRealtimeRecordReaderTest {
   public void testUnMergedReader() throws Exception {
     // initial commit
     Schema schema = HoodieAvroUtils.addMetadataFields(SchemaTestUtil.getEvolvedSchema());
-    HoodieTestUtils.initTableType(hadoopConf, basePath.getRoot().getAbsolutePath(),
+    HoodieTestUtils.init(hadoopConf, basePath.getRoot().getAbsolutePath(),
         HoodieTableType.MERGE_ON_READ);
     String commitTime = "100";
     final int numRecords = 1000;
@@ -347,7 +347,7 @@ public class HoodieRealtimeRecordReaderTest {
   public void testReaderWithNestedAndComplexSchema() throws Exception {
     // initial commit
     Schema schema = HoodieAvroUtils.addMetadataFields(SchemaTestUtil.getComplexEvolvedSchema());
-    HoodieTestUtils.initTableType(hadoopConf, basePath.getRoot().getAbsolutePath(),
+    HoodieTestUtils.init(hadoopConf, basePath.getRoot().getAbsolutePath(),
         HoodieTableType.MERGE_ON_READ);
     String commitTime = "100";
     int numberOfRecords = 100;
@@ -489,7 +489,7 @@ public class HoodieRealtimeRecordReaderTest {
     // initial commit
     List<String> logFilePaths = new ArrayList<>();
     Schema schema = HoodieAvroUtils.addMetadataFields(SchemaTestUtil.getSimpleSchema());
-    HoodieTestUtils.initTableType(hadoopConf, basePath.getRoot().getAbsolutePath(),
+    HoodieTestUtils.init(hadoopConf, basePath.getRoot().getAbsolutePath(),
         HoodieTableType.MERGE_ON_READ);
     String commitTime = "100";
     int numberOfRecords = 100;
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HDFSParquetImporter.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HDFSParquetImporter.java
index 6af7d31..2dc907c 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HDFSParquetImporter.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HDFSParquetImporter.java
@@ -131,7 +131,7 @@ public class HDFSParquetImporter implements Serializable {
       properties.put(HoodieTableConfig.HOODIE_TABLE_NAME_PROP_NAME, cfg.tableName);
       properties.put(HoodieTableConfig.HOODIE_TABLE_TYPE_PROP_NAME, cfg.tableType);
       HoodieTableMetaClient
-          .initializePathAsHoodieDataset(jsc.hadoopConfiguration(), cfg.targetPath, properties);
+          .initDatasetAndGetMetaClient(jsc.hadoopConfiguration(), cfg.targetPath, properties);
 
       HoodieWriteClient client = UtilHelpers.createHoodieClient(jsc, cfg.targetPath, schemaStr,
           cfg.parallelism, Option.empty(), props);
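
For the production-side rename, a minimal sketch of initializing a path as a Hoodie
dataset through the renamed static helper, mirroring the HDFSParquetImporter hunk
above (the class name, target path, and table name are illustrative placeholders;
only the HoodieTableMetaClient and HoodieTableConfig usage comes from the diff):

    import java.io.IOException;
    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;

    import org.apache.hudi.common.model.HoodieTableType;
    import org.apache.hudi.common.table.HoodieTableConfig;
    import org.apache.hudi.common.table.HoodieTableMetaClient;

    public class InitDatasetSketch {
      public static void main(String[] args) throws IOException {
        // Placeholder values, for illustration only.
        String targetPath = "/tmp/hoodie/imported-dataset";

        Properties properties = new Properties();
        properties.put(HoodieTableConfig.HOODIE_TABLE_NAME_PROP_NAME, "sample_table");
        properties.put(HoodieTableConfig.HOODIE_TABLE_TYPE_PROP_NAME,
            HoodieTableType.COPY_ON_WRITE.name());

        // Formerly HoodieTableMetaClient.initializePathAsHoodieDataset(...);
        // per this commit only the method name changes, not the behavior.
        HoodieTableMetaClient metaClient = HoodieTableMetaClient
            .initDatasetAndGetMetaClient(new Configuration(), targetPath, properties);
      }
    }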