You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by an...@apache.org on 2015/12/16 03:15:15 UTC
spark git commit: [SPARK-12056][CORE] Part 2 Create a
TaskAttemptContext only after calling setConf
Repository: spark
Updated Branches:
refs/heads/master 840bd2e00 -> f725b2ec1
[SPARK-12056][CORE] Part 2 Create a TaskAttemptContext only after calling setConf
This is a continuation of SPARK-12056, where the change is applied to SqlNewHadoopRDD.scala
andrewor14
FYI
Author: tedyu <yu...@gmail.com>
Closes #10164 from tedyu/master.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f725b2ec
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f725b2ec
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f725b2ec
Branch: refs/heads/master
Commit: f725b2ec1ab0d89e35b5e2d3ddeddb79fec85f6d
Parents: 840bd2e
Author: tedyu <yu...@gmail.com>
Authored: Tue Dec 15 18:15:10 2015 -0800
Committer: Andrew Or <an...@databricks.com>
Committed: Tue Dec 15 18:15:10 2015 -0800
----------------------------------------------------------------------
.../apache/spark/sql/execution/datasources/SqlNewHadoopRDD.scala | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/f725b2ec/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/SqlNewHadoopRDD.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/SqlNewHadoopRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/SqlNewHadoopRDD.scala
index 56cb63d..eea780c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/SqlNewHadoopRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/SqlNewHadoopRDD.scala
@@ -148,14 +148,14 @@ private[spark] class SqlNewHadoopRDD[V: ClassTag](
}
inputMetrics.setBytesReadCallback(bytesReadCallback)
- val attemptId = newTaskAttemptID(jobTrackerId, id, isMap = true, split.index, 0)
- val hadoopAttemptContext = newTaskAttemptContext(conf, attemptId)
val format = inputFormatClass.newInstance
format match {
case configurable: Configurable =>
configurable.setConf(conf)
case _ =>
}
+ val attemptId = newTaskAttemptID(jobTrackerId, id, isMap = true, split.index, 0)
+ val hadoopAttemptContext = newTaskAttemptContext(conf, attemptId)
private[this] var reader: RecordReader[Void, V] = null
/**
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org