You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2023/12/11 02:46:50 UTC
(spark) branch master updated: [SPARK-46215][CORE][FOLLOWUP] Handle symbolic links
This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new bacdb3b5fec [SPARK-46215][CORE][FOLLOWUP] Handle symbolic links
bacdb3b5fec is described below
commit bacdb3b5fec9783f46042764eeee80eb2a0f5702
Author: Dongjoon Hyun <dh...@apple.com>
AuthorDate: Sun Dec 10 18:46:37 2023 -0800
[SPARK-46215][CORE][FOLLOWUP] Handle symbolic links
### What changes were proposed in this pull request?
This PR aims to fix a regression on symbolic links.
### Why are the changes needed?
To preserve the previous behavior when the recovery directory is a symbolic link to an existing directory.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Pass the CIs with the newly added test case.
I also verified this on Mac's `/tmp` directory.
```
$ ls -al /tmp
lrwxr-xr-x 1 root wheel 11 Nov 17 02:37 /tmp -> private/tmp
```
**MASTER**
```
23/12/10 16:04:53 INFO FileSystemRecoveryModeFactory: Persisting recovery state to directory: /tmp
23/12/10 16:04:53 INFO Master: I have been elected leader! New state: ALIVE
23/12/10 16:08:39 INFO Master: Registering worker 127.0.0.1:50535 with 8 cores, 15.0 GiB RAM
```
**PERSISTED DATA**
```
$ ls -al /tmp/worker_*
-rw-r--r-- 1 dongjoon wheel 1354 Dec 10 16:08 /tmp/worker_worker-20231210160839-127.0.0.1-50535
```
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #44285 from dongjoon-hyun/SPARK-46215-2.
Authored-by: Dongjoon Hyun <dh...@apple.com>
Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
.../deploy/master/FileSystemPersistenceEngine.scala | 9 +++++++--
.../spark/deploy/master/PersistenceEngineSuite.scala | 16 ++++++++++++++++
2 files changed, 23 insertions(+), 2 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala b/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
index 785367a0dee..fb067f10c5a 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
@@ -18,7 +18,7 @@
package org.apache.spark.deploy.master
import java.io._
-import java.nio.file.{Files, Paths}
+import java.nio.file.{FileAlreadyExistsException, Files, Paths}
import scala.reflect.ClassTag
@@ -42,7 +42,12 @@ private[master] class FileSystemPersistenceEngine(
val codec: Option[CompressionCodec] = None)
extends PersistenceEngine with Logging {
- Files.createDirectories(Paths.get(dir))
+ try {
+ Files.createDirectories(Paths.get(dir))
+ } catch {
+ case _: FileAlreadyExistsException if Files.isSymbolicLink(Paths.get(dir)) =>
+ Files.createDirectories(Paths.get(dir).toRealPath())
+ }
override def persist(name: String, obj: Object): Unit = {
serializeIntoFile(new File(dir + File.separator + name), obj)
diff --git a/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala b/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala
index 84181ea3fca..b977a114244 100644
--- a/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala
@@ -19,6 +19,7 @@
package org.apache.spark.deploy.master
import java.net.ServerSocket
+import java.nio.file.{Files, Paths}
import java.util.concurrent.ThreadLocalRandom
import org.apache.curator.test.TestingServer
@@ -72,6 +73,21 @@ class PersistenceEngineSuite extends SparkFunSuite {
}
}
+ test("SPARK-46215: FileSystemPersistenceEngine with a symbolic link") {
+ withTempDir { dir =>
+ val target = Paths.get(dir.getAbsolutePath(), "target")
+ val link = Paths.get(dir.getAbsolutePath(), "symbolic_link");
+
+ Files.createDirectories(target)
+ Files.createSymbolicLink(link, target);
+
+ val conf = new SparkConf()
+ testPersistenceEngine(conf, serializer =>
+ new FileSystemPersistenceEngine(link.toAbsolutePath.toString, serializer)
+ )
+ }
+ }
+
test("SPARK-46205: Support KryoSerializer in FileSystemPersistenceEngine") {
withTempDir { dir =>
val conf = new SparkConf()
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org