Posted to commits@hudi.apache.org by "nsivabalan (via GitHub)" <gi...@apache.org> on 2023/04/03 06:51:32 UTC

[GitHub] [hudi] nsivabalan commented on a diff in pull request #8290: [HUDI-5983] Improve loading data via cloud store incr source

nsivabalan commented on code in PR #8290:
URL: https://github.com/apache/hudi/pull/8290#discussion_r1155545875


##########
hudi-utilities/src/main/java/org/apache/hudi/utilities/sources/helpers/gcs/GcsObjectsFetcher.java:
##########
@@ -21,29 +21,33 @@
 import org.apache.hudi.common.config.SerializableConfiguration;
 import org.apache.hudi.common.config.TypedProperties;
 import org.apache.hudi.common.util.Option;
-import org.apache.hudi.utilities.sources.helpers.CloudObjectsSelectorCommon;
+import org.apache.hudi.utilities.sources.helpers.CloudObject;
+
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.Row;
 
 import java.io.Serializable;
 import java.util.List;
+
 import static org.apache.hudi.common.util.StringUtils.isNullOrEmpty;
+import static org.apache.hudi.utilities.sources.helpers.CloudObjectsSelectorCommon.getCloudObjectsPerPartition;
 import static org.apache.hudi.utilities.sources.helpers.CloudStoreIngestionConfig.CLOUD_DATAFILE_EXTENSION;
 import static org.apache.hudi.utilities.sources.helpers.CloudStoreIngestionConfig.IGNORE_RELATIVE_PATH_PREFIX;
 import static org.apache.hudi.utilities.sources.helpers.CloudStoreIngestionConfig.IGNORE_RELATIVE_PATH_SUBSTR;
 import static org.apache.hudi.utilities.sources.helpers.CloudStoreIngestionConfig.SELECT_RELATIVE_PATH_PREFIX;
 
 /**
- * Extracts a list of fully qualified GCS filepaths from a given Spark Dataset as input.
+ * Extracts a list of GCS {@link CloudObject}s containing filepaths from a given Spark Dataset as input.
  * Optionally:
  * i) Match the filename and path against provided input filter strings
  * ii) Check if each file exists on GCS, in which case it assumes SparkContext is already
  * configured with GCS options through GcsEventsHoodieIncrSource.addGcsAccessConfs().
  */
-public class FilePathsFetcher implements Serializable {
+public class GcsObjectsFetcher implements Serializable {

Review Comment:
   sg. (sounds good)
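
   For readers skimming the archive, the Javadoc above describes the fetcher's job:
   turn a Spark Dataset of GCS metadata rows into CloudObject entries, optionally
   filtering by file extension and relative-path prefixes. Below is a minimal,
   self-contained sketch of that filtering step only; the names CloudObjectInfo and
   filter are hypothetical stand-ins and NOT the actual Hudi API, and the real
   GcsObjectsFetcher operates on a Dataset<Row> via CloudObjectsSelectorCommon
   rather than on an in-memory list.

   import java.util.Arrays;
   import java.util.List;
   import java.util.stream.Collectors;

   // Hypothetical stand-in for Hudi's CloudObject; the real class lives in
   // org.apache.hudi.utilities.sources.helpers and may have a different shape.
   class CloudObjectInfo {
     final String path;
     final long size;
     CloudObjectInfo(String path, long size) { this.path = path; this.size = size; }
     @Override public String toString() { return path + " (" + size + " bytes)"; }
   }

   public class GcsObjectsFilterSketch {
     // Mirrors the spirit of the configs imported in the diff
     // (CLOUD_DATAFILE_EXTENSION, SELECT_RELATIVE_PATH_PREFIX, IGNORE_RELATIVE_PATH_PREFIX):
     // keep objects matching the extension and selected prefix, drop ignored prefixes.
     static List<CloudObjectInfo> filter(List<CloudObjectInfo> objects,
                                         String extension,
                                         String selectPrefix,
                                         String ignorePrefix) {
       return objects.stream()
           .filter(o -> extension == null || o.path.endsWith(extension))
           .filter(o -> selectPrefix == null || o.path.startsWith(selectPrefix))
           .filter(o -> ignorePrefix == null || !o.path.startsWith(ignorePrefix))
           .collect(Collectors.toList());
     }

     public static void main(String[] args) {
       List<CloudObjectInfo> listing = Arrays.asList(
           new CloudObjectInfo("data/2023/04/part-0001.parquet", 1024),
           new CloudObjectInfo("data/2023/04/_SUCCESS", 0),
           new CloudObjectInfo("tmp/2023/04/part-0002.parquet", 2048));
       // Select only parquet files under data/, ignoring anything under tmp/.
       filter(listing, ".parquet", "data/", "tmp/").forEach(System.out::println);
     }
   }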


