You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@iceberg.apache.org by op...@apache.org on 2022/03/24 10:31:04 UTC
[iceberg] branch master updated: ORC: Avoid modifying the existing conf of HadoopOutputFile; create a new one instead (#4384)
This is an automated email from the ASF dual-hosted git repository.
openinx pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/iceberg.git
The following commit(s) were added to refs/heads/master by this push:
new a314fca ORC: Avoid modifying the existing conf of HadoopOutputFile; create a new one instead (#4384)
a314fca is described below
commit a314fca340f65170b912958c508f925e6d1c0afa
Author: Prashant Singh <35...@users.noreply.github.com>
AuthorDate: Thu Mar 24 16:00:47 2022 +0530
ORC: Avoid modifying the existing conf of HadoopOutputFile; create a new one instead (#4384)
---
orc/src/main/java/org/apache/iceberg/orc/ORC.java | 15 +++++++--------
1 file changed, 7 insertions(+), 8 deletions(-)
diff --git a/orc/src/main/java/org/apache/iceberg/orc/ORC.java b/orc/src/main/java/org/apache/iceberg/orc/ORC.java
index 12458d1..78dd62b 100644
--- a/orc/src/main/java/org/apache/iceberg/orc/ORC.java
+++ b/orc/src/main/java/org/apache/iceberg/orc/ORC.java
@@ -83,6 +83,7 @@ public class ORC {
public static class WriteBuilder {
private final OutputFile file;
+ private final Configuration conf;
private Schema schema = null;
private BiFunction<Schema, TypeDescription, OrcRowWriter<?>> createWriterFunc;
private Map<String, byte[]> metadata = Maps.newHashMap();
@@ -93,6 +94,11 @@ public class ORC {
private WriteBuilder(OutputFile file) {
this.file = file;
+ if (file instanceof HadoopOutputFile) {
+ this.conf = new Configuration(((HadoopOutputFile) file).getConf());
+ } else {
+ this.conf = new Configuration();
+ }
}
public WriteBuilder forTable(Table table) {
@@ -162,15 +168,8 @@ public class ORC {
public <D> FileAppender<D> build() {
Preconditions.checkNotNull(schema, "Schema is required");
- Configuration conf;
- if (file instanceof HadoopOutputFile) {
- conf = ((HadoopOutputFile) file).getConf();
- } else {
- conf = new Configuration();
- }
-
for (Map.Entry<String, String> entry : config.entrySet()) {
- conf.set(entry.getKey(), entry.getValue());
+ this.conf.set(entry.getKey(), entry.getValue());
}
// for compatibility