You are viewing a plain-text version of this content; the canonical link to the original message is available in the mailing list archive.
Posted to commits@spark.apache.org by ma...@apache.org on 2014/12/02 21:23:31 UTC

spark git commit: [SPARK-4663][SQL] Add finally to avoid resource leak

Repository: spark
Updated Branches:
  refs/heads/master e75e04f98 -> 69b6fed20


[SPARK-4663][SQL] Add finally to avoid resource leak

Author: baishuo <vc...@hotmail.com>

Closes #3526 from baishuo/master-trycatch and squashes the following commits:

d446e14 [baishuo] correct the code style
b36bf96 [baishuo] correct the code style
ae0e447 [baishuo] add finally to avoid resource leak


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/69b6fed2
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/69b6fed2
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/69b6fed2

Branch: refs/heads/master
Commit: 69b6fed206565ecb0173d3757bcb5110422887c3
Parents: e75e04f
Author: baishuo <vc...@hotmail.com>
Authored: Tue Dec 2 12:12:03 2014 -0800
Committer: Michael Armbrust <mi...@databricks.com>
Committed: Tue Dec 2 12:12:03 2014 -0800

----------------------------------------------------------------------
 .../spark/sql/parquet/ParquetTableOperations.scala       | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/69b6fed2/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableOperations.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableOperations.scala b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableOperations.scala
index 0e36852..232ef90 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableOperations.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableOperations.scala
@@ -302,11 +302,14 @@ case class InsertIntoParquetTable(
       val committer = format.getOutputCommitter(hadoopContext)
       committer.setupTask(hadoopContext)
       val writer = format.getRecordWriter(hadoopContext)
-      while (iter.hasNext) {
-        val row = iter.next()
-        writer.write(null, row)
+      try {
+        while (iter.hasNext) {
+          val row = iter.next()
+          writer.write(null, row)
+        }
+      } finally {
+        writer.close(hadoopContext)
       }
-      writer.close(hadoopContext)
       committer.commitTask(hadoopContext)
       1
     }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org