Posted to commits@parquet.apache.org by fo...@apache.org on 2020/01/02 12:46:55 UTC

[parquet-mr] branch master updated: PARQUET-1723: Read From Maps without using .contains(...) (#711)

This is an automated email from the ASF dual-hosted git repository.

fokko pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/parquet-mr.git


The following commit(s) were added to refs/heads/master by this push:
     new 3b4ecf2  PARQUET-1723: Read From Maps without using .contains(...) (#711)
3b4ecf2 is described below

commit 3b4ecf2c7029d12265b3e97a1f8ea98da1c1e5c2
Author: belugabehr <12...@users.noreply.github.com>
AuthorDate: Thu Jan 2 07:46:49 2020 -0500

    PARQUET-1723: Read From Maps without using .contains(...) (#711)
---
 .../main/java/org/apache/parquet/column/EncodingStats.java | 14 ++++----------
 .../src/main/java/org/apache/parquet/schema/GroupType.java |  5 +++--
 .../main/java/org/apache/parquet/ParquetReadOptions.java   |  9 ++++-----
 .../apache/parquet/hadoop/ColumnChunkPageReadStore.java    |  5 +++--
 .../main/java/org/apache/parquet/hadoop/MemoryManager.java |  4 +---
 5 files changed, 15 insertions(+), 22 deletions(-)

diff --git a/parquet-column/src/main/java/org/apache/parquet/column/EncodingStats.java b/parquet-column/src/main/java/org/apache/parquet/column/EncodingStats.java
index a8b95f8..b9f9b67 100644
--- a/parquet-column/src/main/java/org/apache/parquet/column/EncodingStats.java
+++ b/parquet-column/src/main/java/org/apache/parquet/column/EncodingStats.java
@@ -57,19 +57,13 @@ public class EncodingStats {
   }
 
   public int getNumDictionaryPagesEncodedAs(Encoding enc) {
-    if (dictStats.containsKey(enc)) {
-      return dictStats.get(enc);
-    } else {
-      return 0;
-    }
+    final Integer i = dictStats.get(enc);
+    return (i == null) ? 0 : i.intValue();
   }
 
   public int getNumDataPagesEncodedAs(Encoding enc) {
-    if (dataStats.containsKey(enc)) {
-      return dataStats.get(enc);
-    } else {
-      return 0;
-    }
+    final Integer i = dataStats.get(enc);
+    return (i == null) ? 0 : i.intValue();
   }
 
   public boolean hasDictionaryPages() {
diff --git a/parquet-column/src/main/java/org/apache/parquet/schema/GroupType.java b/parquet-column/src/main/java/org/apache/parquet/schema/GroupType.java
index 52184e1..897fdf8 100644
--- a/parquet-column/src/main/java/org/apache/parquet/schema/GroupType.java
+++ b/parquet-column/src/main/java/org/apache/parquet/schema/GroupType.java
@@ -171,10 +171,11 @@ public class GroupType extends Type {
    * @return the index of the field with that name
    */
   public int getFieldIndex(String name) {
-    if (!indexByName.containsKey(name)) {
+    Integer i = indexByName.get(name);
+    if (i == null) {
       throw new InvalidRecordException(name + " not found in " + this);
     }
-    return indexByName.get(name);
+    return i.intValue();
   }
 
   /**
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/ParquetReadOptions.java b/parquet-hadoop/src/main/java/org/apache/parquet/ParquetReadOptions.java
index f059023..5e4bd09 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/ParquetReadOptions.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/ParquetReadOptions.java
@@ -29,6 +29,7 @@ import org.apache.parquet.hadoop.util.HadoopCodecs;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Optional;
 import java.util.Set;
 
 import static org.apache.parquet.format.converter.ParquetMetadataConverter.NO_FILTER;
@@ -134,11 +135,9 @@ public class ParquetReadOptions {
   }
 
   public boolean isEnabled(String property, boolean defaultValue) {
-    if (properties.containsKey(property)) {
-      return Boolean.valueOf(properties.get(property));
-    } else {
-      return defaultValue;
-    }
+    Optional<String> propValue = Optional.ofNullable(properties.get(property));
+    return propValue.isPresent() ? Boolean.valueOf(propValue.get())
+        : defaultValue;
   }
 
   public static Builder builder() {
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ColumnChunkPageReadStore.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ColumnChunkPageReadStore.java
index 3067e2b..6f21fa3 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ColumnChunkPageReadStore.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ColumnChunkPageReadStore.java
@@ -227,10 +227,11 @@ class ColumnChunkPageReadStore implements PageReadStore, DictionaryPageReadStore
 
   @Override
   public PageReader getPageReader(ColumnDescriptor path) {
-    if (!readers.containsKey(path)) {
+    final PageReader pageReader = readers.get(path);
+    if (pageReader == null) {
       throw new IllegalArgumentException(path + " is not in the store: " + readers.keySet() + " " + rowCount);
     }
-    return readers.get(path);
+    return pageReader;
   }
 
   @Override
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/MemoryManager.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/MemoryManager.java
index dc5c31d..d6fabb2 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/MemoryManager.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/MemoryManager.java
@@ -92,9 +92,7 @@ public class MemoryManager {
    * @param writer the writer that has been closed
    */
   synchronized void removeWriter(InternalParquetRecordWriter writer) {
-    if (writerList.containsKey(writer)) {
-      writerList.remove(writer);
-    }
+    writerList.remove(writer);
     if (!writerList.isEmpty()) {
       updateAllocation();
     }
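
Editor's note: the pattern applied across all five files above is the same one. Instead of calling containsKey(...) and then get(...) (two hash lookups on the same key), the value is fetched once with get(...) and checked against null. A minimal standalone sketch of the idea follows; it is not code from parquet-mr, the class and field names are made up for illustration, and it assumes the map never stores null values (which is what makes the two forms equivalent).

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative only; not part of this commit.
    public class SingleLookupExample {
      private final Map<String, Integer> counts = new HashMap<>();

      // Two lookups: containsKey(...) hashes the key, then get(...) hashes it again.
      int countTwoLookups(String key) {
        if (counts.containsKey(key)) {
          return counts.get(key);
        }
        return 0;
      }

      // One lookup: get(...) returns null when the key is absent.
      int countOneLookup(String key) {
        final Integer i = counts.get(key);
        return (i == null) ? 0 : i.intValue();
      }
    }

For the default-value cases, Map.getOrDefault(key, 0) would also do the job in a single lookup; the explicit null check used in the patch additionally covers the places where a missing key should throw instead, as in GroupType.getFieldIndex and ColumnChunkPageReadStore.getPageReader.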