You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by we...@apache.org on 2017/07/20 11:16:35 UTC
spark git commit: [SPARK-21477][SQL][MINOR] Mark LocalTableScanExec's
input data transient
Repository: spark
Updated Branches:
refs/heads/master 5b61cc6d6 -> 256358f66
[SPARK-21477][SQL][MINOR] Mark LocalTableScanExec's input data transient
## What changes were proposed in this pull request?
This PR is to mark the parameters `rows` and `unsafeRows` of LocalTableScanExec transient. This avoids serializing these unneeded objects.
## How was this patch tested?
N/A
Author: gatorsmile <ga...@gmail.com>
Closes #18686 from gatorsmile/LocalTableScanExec.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/256358f6
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/256358f6
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/256358f6
Branch: refs/heads/master
Commit: 256358f66a975aa62eb81378e4e073fc2d15644b
Parents: 5b61cc6
Author: gatorsmile <ga...@gmail.com>
Authored: Thu Jul 20 19:16:26 2017 +0800
Committer: Wenchen Fan <we...@databricks.com>
Committed: Thu Jul 20 19:16:26 2017 +0800
----------------------------------------------------------------------
.../org/apache/spark/sql/execution/LocalTableScanExec.scala | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/256358f6/sql/core/src/main/scala/org/apache/spark/sql/execution/LocalTableScanExec.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/LocalTableScanExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/LocalTableScanExec.scala
index 19c68c1..514ad70 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/LocalTableScanExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/LocalTableScanExec.scala
@@ -28,12 +28,12 @@ import org.apache.spark.sql.execution.metric.SQLMetrics
*/
case class LocalTableScanExec(
output: Seq[Attribute],
- rows: Seq[InternalRow]) extends LeafExecNode {
+ @transient rows: Seq[InternalRow]) extends LeafExecNode {
override lazy val metrics = Map(
"numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
- private lazy val unsafeRows: Array[InternalRow] = {
+ @transient private lazy val unsafeRows: Array[InternalRow] = {
if (rows.isEmpty) {
Array.empty
} else {
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org