Posted to commits@spark.apache.org by hv...@apache.org on 2017/03/16 14:25:51 UTC

spark git commit: [SPARK-19946][TESTING] DebugFilesystem.assertNoOpenStreams should report the open streams to help debugging

Repository: spark
Updated Branches:
  refs/heads/master d647aae27 -> ee91a0dec


[SPARK-19946][TESTING] DebugFilesystem.assertNoOpenStreams should report the open streams to help debugging

## What changes were proposed in this pull request?

DebugFilesystem.assertNoOpenStreams now throws an exception whose cause carries the stack trace captured when the leaked stream was opened, so the failure points directly at the code that leaked it.
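
For illustration, here is a minimal, self-contained sketch of the pattern. OpenStreamTracker and its methods are hypothetical names, not Spark's actual API (the real change to DebugFilesystem is in the diff below): each opened stream is tracked together with a Throwable created at open time, and one of those Throwables is attached as the cause when a leak is detected.

```scala
// Minimal sketch, assuming a hypothetical OpenStreamTracker (not Spark's
// actual DebugFilesystem API): every opened stream is stored together with a
// Throwable created at open time, whose stack trace records the call site.
import java.util.concurrent.ConcurrentHashMap

import scala.collection.JavaConverters._

object OpenStreamTracker {
  // Maps each open stream to an exception capturing where it was opened.
  private val openStreams = new ConcurrentHashMap[AnyRef, Throwable]()

  def onOpen(stream: AnyRef): Unit =
    openStreams.put(stream, new Throwable("Stream opened at:"))

  def onClose(stream: AnyRef): Unit =
    openStreams.remove(stream)

  def assertNoOpenStreams(): Unit = {
    val numOpen = openStreams.size()
    if (numOpen > 0) {
      // The cause's stack trace points at the onOpen call site, so the test
      // failure shows exactly which code leaked the stream.
      throw new IllegalStateException(
        s"There are $numOpen possibly leaked file streams.",
        openStreams.values().asScala.head)
    }
  }
}
```

As the diff shows, DebugFilesystem already tracks open streams together with a captured exception; the change only attaches the first of those as the cause of the thrown exception.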

## How was this patch tested?
New test in SparkContextSuite that checks the thrown exception carries a non-null cause.
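
As a rough sketch of how such a check can be exercised with ScalaTest's intercept, using the hypothetical OpenStreamTracker above (the actual test opens a real temp file through DebugFilesystem; see the SparkContextSuite diff below):

```scala
// Sketch only: exercises the hypothetical tracker, not Spark's real test.
import org.scalatest.FunSuite

class OpenStreamTrackerSuite extends FunSuite {
  test("assertNoOpenStreams reports the leaked stream as the cause") {
    val stream = new Object()
    OpenStreamTracker.onOpen(stream)
    val exc = intercept[IllegalStateException] {
      OpenStreamTracker.assertNoOpenStreams()
    }
    // The cause was captured when onOpen was called, so its stack trace
    // identifies the leak site.
    assert(exc.getCause != null)
    OpenStreamTracker.onClose(stream)
  }
}
```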

Author: Bogdan Raducanu <bo...@databricks.com>

Closes #17292 from bogdanrdc/SPARK-19946.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ee91a0de
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ee91a0de
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ee91a0de

Branch: refs/heads/master
Commit: ee91a0decc389572099ea7c038149cc50375a2ef
Parents: d647aae
Author: Bogdan Raducanu <bo...@databricks.com>
Authored: Thu Mar 16 15:25:45 2017 +0100
Committer: Herman van Hovell <hv...@databricks.com>
Committed: Thu Mar 16 15:25:45 2017 +0100

----------------------------------------------------------------------
 .../org/apache/spark/DebugFilesystem.scala      |  3 ++-
 .../org/apache/spark/SparkContextSuite.scala    | 20 +++++++++++++++++++-
 2 files changed, 21 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/ee91a0de/core/src/test/scala/org/apache/spark/DebugFilesystem.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/DebugFilesystem.scala b/core/src/test/scala/org/apache/spark/DebugFilesystem.scala
index fb8d701..72aea84 100644
--- a/core/src/test/scala/org/apache/spark/DebugFilesystem.scala
+++ b/core/src/test/scala/org/apache/spark/DebugFilesystem.scala
@@ -44,7 +44,8 @@ object DebugFilesystem extends Logging {
         logWarning("Leaked filesystem connection created at:")
         exc.printStackTrace()
       }
-      throw new RuntimeException(s"There are $numOpen possibly leaked file streams.")
+      throw new IllegalStateException(s"There are $numOpen possibly leaked file streams.",
+        openStreams.values().asScala.head)
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/ee91a0de/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index f97a112..d08a162 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -18,7 +18,7 @@
 package org.apache.spark
 
 import java.io.File
-import java.net.MalformedURLException
+import java.net.{MalformedURLException, URI}
 import java.nio.charset.StandardCharsets
 import java.util.concurrent.TimeUnit
 
@@ -26,6 +26,8 @@ import scala.concurrent.duration._
 import scala.concurrent.Await
 
 import com.google.common.io.Files
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.{FileSystem, Path}
 import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
 import org.apache.hadoop.mapred.TextInputFormat
 import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
@@ -538,6 +540,22 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
     }
   }
 
+  test("SPARK-19446: DebugFilesystem.assertNoOpenStreams should report " +
+    "open streams to help debugging") {
+    val fs = new DebugFilesystem()
+    fs.initialize(new URI("file:///"), new Configuration())
+    val file = File.createTempFile("SPARK19446", "temp")
+    Files.write(Array.ofDim[Byte](1000), file)
+    val path = new Path("file:///" + file.getCanonicalPath)
+    val stream = fs.open(path)
+    val exc = intercept[RuntimeException] {
+      DebugFilesystem.assertNoOpenStreams()
+    }
+    assert(exc != null)
+    assert(exc.getCause() != null)
+    stream.close()
+  }
+
 }
 
 object SparkContextSuite {

