You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@parquet.apache.org by ju...@apache.org on 2016/06/30 16:42:15 UTC

parquet-mr git commit: PARQUET-544: Add closed flag to allow for closeable contract adherence

Repository: parquet-mr
Updated Branches:
  refs/heads/master da69d4b76 -> 1f470253c


PARQUET-544: Add closed flag to allow for closeable contract adherence

The Closeable interface states:
> Closes this stream and releases any system resources associated with it. If the stream is already closed then invoking this method has no effect.

As InternalParquetRecordWriter implements the Closeable interface, we should adhere to this contract.

Author: Mark Reddy <ma...@gmail.com>

Closes #345 from markreddy/PARQUET-544-adhere-to-closeable-contract and squashes the following commits:

135db9b [Mark Reddy] PARQUET-544: add closed flag to allow for adherence to closeable contract


Project: http://git-wip-us.apache.org/repos/asf/parquet-mr/repo
Commit: http://git-wip-us.apache.org/repos/asf/parquet-mr/commit/1f470253
Tree: http://git-wip-us.apache.org/repos/asf/parquet-mr/tree/1f470253
Diff: http://git-wip-us.apache.org/repos/asf/parquet-mr/diff/1f470253

Branch: refs/heads/master
Commit: 1f470253c46471033048383c027192e757480492
Parents: da69d4b
Author: Mark Reddy <ma...@gmail.com>
Authored: Thu Jun 30 09:41:51 2016 -0700
Committer: Julien Le Dem <ju...@dremio.com>
Committed: Thu Jun 30 09:41:51 2016 -0700

----------------------------------------------------------------------
 .../hadoop/InternalParquetRecordWriter.java     | 21 ++++++++++++--------
 1 file changed, 13 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/1f470253/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordWriter.java
----------------------------------------------------------------------
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordWriter.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordWriter.java
index 74feb39..6c3dac5 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordWriter.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordWriter.java
@@ -56,6 +56,8 @@ class InternalParquetRecordWriter<T> {
   private final boolean validating;
   private final ParquetProperties props;
 
+  private boolean closed;
+
   private long recordCount = 0;
   private long recordCountForNextMemCheck = MINIMUM_RECORD_COUNT_FOR_CHECK;
   private long lastRowGroupEndPos = 0;
@@ -103,15 +105,18 @@ class InternalParquetRecordWriter<T> {
   }
 
   public void close() throws IOException, InterruptedException {
-    flushRowGroupToStore();
-    FinalizedWriteContext finalWriteContext = writeSupport.finalizeWrite();
-    Map<String, String> finalMetadata = new HashMap<String, String>(extraMetaData);
-    String modelName = writeSupport.getName();
-    if (modelName != null) {
-      finalMetadata.put(ParquetWriter.OBJECT_MODEL_NAME_PROP, modelName);
+    if (!closed) {
+      flushRowGroupToStore();
+      FinalizedWriteContext finalWriteContext = writeSupport.finalizeWrite();
+      Map<String, String> finalMetadata = new HashMap<String, String>(extraMetaData);
+      String modelName = writeSupport.getName();
+      if (modelName != null) {
+        finalMetadata.put(ParquetWriter.OBJECT_MODEL_NAME_PROP, modelName);
+      }
+      finalMetadata.putAll(finalWriteContext.getExtraMetaData());
+      parquetFileWriter.end(finalMetadata);
+      closed = true;
     }
-    finalMetadata.putAll(finalWriteContext.getExtraMetaData());
-    parquetFileWriter.end(finalMetadata);
   }
 
   public void write(T value) throws IOException, InterruptedException {