Posted to commits@hudi.apache.org by vi...@apache.org on 2019/11/30 16:02:41 UTC

[incubator-hudi] branch master updated: [HUDI-372] Support the shortName for Hudi DataSource (#1054)

This is an automated email from the ASF dual-hosted git repository.

vinoth pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 024230f  [HUDI-372] Support the shortName for Hudi DataSource (#1054)
024230f is described below

commit 024230fbd23173db38e2c0f66808606481223003
Author: lamber-ken <la...@163.com>
AuthorDate: Sun Dec 1 00:02:33 2019 +0800

    [HUDI-372] Support the shortName for Hudi DataSource (#1054)
    
    - Ability to do `spark.write.format("hudi")...`
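
For context, here is a minimal usage sketch of the new alias (not part of the commit; the table name, field names, and paths are assumptions). The short name is picked up by DataFrameReader/DataFrameWriter, i.e. df.write.format("hudi") / spark.read.format("hudi"); before this change the data source had to be referenced by its fully-qualified name, format("org.apache.hudi"), since no DataSourceRegister service entry existed.

    import org.apache.spark.sql.{SaveMode, SparkSession}

    val spark = SparkSession.builder().appName("hudi-short-name-demo").getOrCreate()
    // Hypothetical input: any DataFrame with a record key, precombine, and partition column.
    val df = spark.read.json("/tmp/source_events.json")

    df.write.format("hudi")  // resolves to org.apache.hudi.DefaultSource via DataSourceRegister
      .option("hoodie.table.name", "demo_table")                               // assumed table name
      .option("hoodie.datasource.write.recordkey.field", "uuid")               // assumed key field
      .option("hoodie.datasource.write.precombine.field", "ts")                // assumed precombine field
      .option("hoodie.datasource.write.partitionpath.field", "partitionpath")  // assumed partition field
      .mode(SaveMode.Overwrite)
      .save("/tmp/hudi/demo_table")                                            // assumed base path

    // Reading back through the same alias; the glob depth depends on the partition layout.
    val readBack = spark.read.format("hudi").load("/tmp/hudi/demo_table/*/*")
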
---
 .../org.apache.spark.sql.sources.DataSourceRegister  | 20 ++++++++++++++++++++
 .../main/scala/org/apache/hudi/DefaultSource.scala   |  2 +-
 hudi-spark/src/test/scala/TestDataSource.scala       | 13 +++++++++++++
 3 files changed, 34 insertions(+), 1 deletion(-)

diff --git a/hudi-spark/src/main/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister b/hudi-spark/src/main/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister
new file mode 100644
index 0000000..ea82b80
--- /dev/null
+++ b/hudi-spark/src/main/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister
@@ -0,0 +1,20 @@
+################################################################################
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+# limitations under the License.
+################################################################################
+
+
+org.apache.hudi.DefaultSource
\ No newline at end of file
diff --git a/hudi-spark/src/main/scala/org/apache/hudi/DefaultSource.scala b/hudi-spark/src/main/scala/org/apache/hudi/DefaultSource.scala
index 18f3dba..f50d90f 100644
--- a/hudi-spark/src/main/scala/org/apache/hudi/DefaultSource.scala
+++ b/hudi-spark/src/main/scala/org/apache/hudi/DefaultSource.scala
@@ -104,5 +104,5 @@ class DefaultSource extends RelationProvider
       outputMode)
   }
 
-  override def shortName(): String = "hoodie"
+  override def shortName(): String = "hudi"
 }
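
For readers wondering how the service file above and the shortName() change fit together: Spark resolves a format short name by loading every DataSourceRegister implementation through the JVM ServiceLoader and matching on shortName(). A small verification sketch (not part of the commit; assumes hudi-spark and spark-sql are on the classpath):

    import java.util.ServiceLoader
    import scala.collection.JavaConverters._
    import org.apache.spark.sql.sources.DataSourceRegister

    // Enumerate every data source registered on the classpath and collect its short name.
    val shortNames = ServiceLoader.load(classOf[DataSourceRegister])
      .asScala.map(_.shortName()).toSet

    // With the new META-INF/services entry plus the "hudi" shortName, this now holds,
    // which is exactly what lets .format("hudi") resolve to org.apache.hudi.DefaultSource.
    assert(shortNames.contains("hudi"))
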
diff --git a/hudi-spark/src/test/scala/TestDataSource.scala b/hudi-spark/src/test/scala/TestDataSource.scala
index d7ea714..587c55a 100644
--- a/hudi-spark/src/test/scala/TestDataSource.scala
+++ b/hudi-spark/src/test/scala/TestDataSource.scala
@@ -63,6 +63,19 @@ class TestDataSource extends AssertionsForJUnit {
     fs = FSUtils.getFs(basePath, spark.sparkContext.hadoopConfiguration)
   }
 
+  @Test def testShortNameStorage() {
+    // Insert Operation
+    val records = DataSourceTestUtils.convertToStringList(dataGen.generateInserts("000", 100)).toList
+    val inputDF: Dataset[Row] = spark.read.json(spark.sparkContext.parallelize(records, 2))
+    inputDF.write.format("hudi")
+      .options(commonOpts)
+      .option(DataSourceWriteOptions.OPERATION_OPT_KEY, DataSourceWriteOptions.INSERT_OPERATION_OPT_VAL)
+      .mode(SaveMode.Overwrite)
+      .save(basePath)
+
+    assertTrue(HoodieDataSourceHelpers.hasNewCommits(fs, basePath, "000"))
+  }
+
   @Test def testCopyOnWriteStorage() {
     // Insert Operation
     val records1 = DataSourceTestUtils.convertToStringList(dataGen.generateInserts("000", 100)).toList