You are viewing a plain text version of this content; the canonical hyperlink was present in the original HTML archive page.
Posted to commits@spark.apache.org by td...@apache.org on 2014/05/20 07:36:28 UTC
git commit: [Spark 1877] ClassNotFoundException when loading RDD with serialized objects
Repository: spark
Updated Branches:
refs/heads/master bcb9dce6f -> 52eb54d02
[Spark 1877] ClassNotFoundException when loading RDD with serialized objects
Updated version of #821
Author: Tathagata Das <ta...@gmail.com>
Author: Ghidireac <bo...@u448a5b0a73d45358d94a.ant.amazon.com>
Closes #835 from tdas/SPARK-1877 and squashes the following commits:
f346f71 [Tathagata Das] Addressed Patrick's comments.
fee0c5d [Ghidireac] SPARK-1877: ClassNotFoundException when loading RDD with serialized objects
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/52eb54d0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/52eb54d0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/52eb54d0
Branch: refs/heads/master
Commit: 52eb54d02403a3c37d84b9da7cc1cdb261048cf8
Parents: bcb9dce
Author: Tathagata Das <ta...@gmail.com>
Authored: Mon May 19 22:36:24 2014 -0700
Committer: Tathagata Das <ta...@gmail.com>
Committed: Mon May 19 22:36:24 2014 -0700
----------------------------------------------------------------------
core/src/main/scala/org/apache/spark/SparkContext.scala | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/52eb54d0/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 634c10c..49737fa 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -718,7 +718,7 @@ class SparkContext(config: SparkConf) extends Logging {
minPartitions: Int = defaultMinPartitions
): RDD[T] = {
sequenceFile(path, classOf[NullWritable], classOf[BytesWritable], minPartitions)
- .flatMap(x => Utils.deserialize[Array[T]](x._2.getBytes))
+ .flatMap(x => Utils.deserialize[Array[T]](x._2.getBytes, Utils.getContextOrSparkClassLoader))
}
protected[spark] def checkpointFile[T: ClassTag](