You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2021/04/15 05:25:29 UTC
[spark] branch branch-3.1 updated: [SPARK-34225][CORE][FOLLOWUP]
Replace Hadoop's Path with Utils.resolveURI to make the way to get URI
simple
This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.1 by this push:
new 71ab0c8 [SPARK-34225][CORE][FOLLOWUP] Replace Hadoop's Path with Utils.resolveURI to make the way to get URI simple
71ab0c8 is described below
commit 71ab0c87c1f480584400b96d70d1e4be99748ed3
Author: Kousuke Saruta <sa...@oss.nttdata.com>
AuthorDate: Wed Apr 14 22:24:29 2021 -0700
[SPARK-34225][CORE][FOLLOWUP] Replace Hadoop's Path with Utils.resolveURI to make the way to get URI simple
### What changes were proposed in this pull request?
This PR proposes to replace Hadoop's `Path` with `Utils.resolveURI` to make the way to get URI simple in `SparkContext`.
### Why are the changes needed?
These changes keep the code simple by using a single URI-resolution path (`Utils.resolveURI`) instead of branching between `java.net.URI` and Hadoop's `Path`.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Existing tests.
Closes #32164 from sarutak/followup-SPARK-34225.
Authored-by: Kousuke Saruta <sa...@oss.nttdata.com>
Signed-off-by: Dongjoon Hyun <dh...@apple.com>
(cherry picked from commit 767ea86ecf60dd85a925ec5111f0b16dd931c1fe)
Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
core/src/main/scala/org/apache/spark/SparkContext.scala | 16 ++--------------
1 file changed, 2 insertions(+), 14 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 4d6dec3..f0e8d33 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1583,15 +1583,7 @@ class SparkContext(config: SparkConf) extends Logging {
private def addFile(
path: String, recursive: Boolean, addedOnSubmit: Boolean, isArchive: Boolean = false
): Unit = {
- val uri = if (!isArchive) {
- if (Utils.isAbsoluteURI(path) && path.contains("%")) {
- new URI(path)
- } else {
- new Path(path).toUri
- }
- } else {
- Utils.resolveURI(path)
- }
+ val uri = Utils.resolveURI(path)
val schemeCorrectedURI = uri.getScheme match {
case null => new File(path).getCanonicalFile.toURI
case "local" =>
@@ -1979,11 +1971,7 @@ class SparkContext(config: SparkConf) extends Logging {
// For local paths with backslashes on Windows, URI throws an exception
addLocalJarFile(new File(path))
} else {
- val uri = if (Utils.isAbsoluteURI(path) && path.contains("%")) {
- new URI(path)
- } else {
- new Path(path).toUri
- }
+ val uri = Utils.resolveURI(path)
// SPARK-17650: Make sure this is a valid URL before adding it to the list of dependencies
Utils.validateURL(uri)
uri.getScheme match {
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org