You are viewing a plain text version of this content; the canonical link to the original message was removed during text extraction and is available in the mailing list archive.
Posted to commits@iceberg.apache.org by bl...@apache.org on 2020/03/13 23:00:24 UTC

[incubator-iceberg] branch master updated: Build: Remove warnings (#770)

This is an automated email from the ASF dual-hosted git repository.

blue pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-iceberg.git


The following commit(s) were added to refs/heads/master by this push:
     new c3084d3  Build: Remove warnings (#770)
c3084d3 is described below

commit c3084d362111e5ab0a07488739fdd53b8e179c4c
Author: Fokko Driesprong <fo...@apache.org>
AuthorDate: Sat Mar 14 00:00:14 2020 +0100

    Build: Remove warnings (#770)
---
 .../main/java/org/apache/iceberg/PartitionSpec.java | 18 ------------------
 .../iceberg/expressions/StrictMetricsEvaluator.java |  2 +-
 .../java/org/apache/iceberg/transforms/Bucket.java  | 19 -------------------
 .../java/org/apache/iceberg/AllManifestsTable.java  |  2 --
 .../org/apache/iceberg/BaseRewriteManifests.java    |  2 +-
 .../apache/iceberg/InheritableMetadataFactory.java  |  2 ++
 .../org/apache/iceberg/TableMetadataParser.java     |  9 +++++----
 .../iceberg/hadoop/HadoopTableOperations.java       |  4 ++--
 .../org/apache/iceberg/data/IcebergGenerics.java    |  2 --
 .../apache/iceberg/data/orc/GenericOrcWriter.java   | 21 ++++++++++++++-------
 .../org/apache/iceberg/spark/SparkSchemaUtil.java   |  4 ----
 .../org/apache/iceberg/spark/source/Writer.java     |  2 --
 12 files changed, 25 insertions(+), 62 deletions(-)

diff --git a/api/src/main/java/org/apache/iceberg/PartitionSpec.java b/api/src/main/java/org/apache/iceberg/PartitionSpec.java
index 7dc070b..9031a7e 100644
--- a/api/src/main/java/org/apache/iceberg/PartitionSpec.java
+++ b/api/src/main/java/org/apache/iceberg/PartitionSpec.java
@@ -21,7 +21,6 @@ package org.apache.iceberg;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -57,7 +56,6 @@ public class PartitionSpec implements Serializable {
   private final int specId;
   private final PartitionField[] fields;
   private transient volatile ListMultimap<Integer, PartitionField> fieldsBySourceId = null;
-  private transient volatile Map<String, PartitionField> fieldsByName = null;
   private transient volatile Class<?>[] lazyJavaClasses = null;
   private transient volatile List<PartitionField> fieldList = null;
 
@@ -228,22 +226,6 @@ public class PartitionSpec implements Serializable {
     return fieldList;
   }
 
-  private Map<String, PartitionField> lazyFieldsByName() {
-    if (fieldsByName == null) {
-      synchronized (this) {
-        if (fieldsByName == null) {
-          ImmutableMap.Builder<String, PartitionField> builder = ImmutableMap.builder();
-          for (PartitionField field : fields) {
-            builder.put(field.name(), field);
-          }
-          this.fieldsByName = builder.build();
-        }
-      }
-    }
-
-    return fieldsByName;
-  }
-
   private ListMultimap<Integer, PartitionField> lazyFieldsBySourceId() {
     if (fieldsBySourceId == null) {
       synchronized (this) {
diff --git a/api/src/main/java/org/apache/iceberg/expressions/StrictMetricsEvaluator.java b/api/src/main/java/org/apache/iceberg/expressions/StrictMetricsEvaluator.java
index e39717c..8cc3e58 100644
--- a/api/src/main/java/org/apache/iceberg/expressions/StrictMetricsEvaluator.java
+++ b/api/src/main/java/org/apache/iceberg/expressions/StrictMetricsEvaluator.java
@@ -382,7 +382,7 @@ public class StrictMetricsEvaluator {
     }
 
     private boolean canContainNulls(Integer id) {
-      return nullCounts == null || nullCounts.containsKey(id) && nullCounts.get(id) > 0;
+      return nullCounts == null || (nullCounts.containsKey(id) && nullCounts.get(id) > 0);
     }
 
     private boolean containsNullsOnly(Integer id) {
diff --git a/api/src/main/java/org/apache/iceberg/transforms/Bucket.java b/api/src/main/java/org/apache/iceberg/transforms/Bucket.java
index a35d056..b5ebb8f 100644
--- a/api/src/main/java/org/apache/iceberg/transforms/Bucket.java
+++ b/api/src/main/java/org/apache/iceberg/transforms/Bucket.java
@@ -241,25 +241,6 @@ abstract class Bucket<T> implements Transform<T, Integer> {
     }
   }
 
-  private static class BucketBytes extends Bucket<byte[]> {
-    private static final Set<TypeID> SUPPORTED_TYPES = Sets.newHashSet(
-        TypeID.BINARY, TypeID.FIXED);
-
-    private BucketBytes(int numBuckets) {
-      super(numBuckets);
-    }
-
-    @Override
-    public int hash(byte[] value) {
-      return MURMUR3.hashBytes(value).asInt();
-    }
-
-    @Override
-    public boolean canTransform(Type type) {
-      return SUPPORTED_TYPES.contains(type.typeId());
-    }
-  }
-
   private static class BucketByteBuffer extends Bucket<ByteBuffer> {
     private static final Set<TypeID> SUPPORTED_TYPES = Sets.newHashSet(
         TypeID.BINARY, TypeID.FIXED);
diff --git a/core/src/main/java/org/apache/iceberg/AllManifestsTable.java b/core/src/main/java/org/apache/iceberg/AllManifestsTable.java
index 1860077..823d487 100644
--- a/core/src/main/java/org/apache/iceberg/AllManifestsTable.java
+++ b/core/src/main/java/org/apache/iceberg/AllManifestsTable.java
@@ -57,12 +57,10 @@ public class AllManifestsTable extends BaseMetadataTable {
 
   private final TableOperations ops;
   private final Table table;
-  private final PartitionSpec spec;
 
   public AllManifestsTable(TableOperations ops, Table table) {
     this.ops = ops;
     this.table = table;
-    this.spec = table.spec();
   }
 
   @Override
diff --git a/core/src/main/java/org/apache/iceberg/BaseRewriteManifests.java b/core/src/main/java/org/apache/iceberg/BaseRewriteManifests.java
index 3c40019..5a9469c 100644
--- a/core/src/main/java/org/apache/iceberg/BaseRewriteManifests.java
+++ b/core/src/main/java/org/apache/iceberg/BaseRewriteManifests.java
@@ -57,7 +57,7 @@ public class BaseRewriteManifests extends SnapshotProducer<RewriteManifests> imp
   private static final String REPLACED_MANIFESTS_COUNT = "manifests-replaced";
   private static final String PROCESSED_ENTRY_COUNT = "entries-processed";
 
-  private static final Set<ManifestEntry.Status> ALLOWED_ENTRY_STATUSES = ImmutableSet.of(
+  private static final ImmutableSet<ManifestEntry.Status> ALLOWED_ENTRY_STATUSES = ImmutableSet.of(
       ManifestEntry.Status.EXISTING);
 
   private final TableOperations ops;
diff --git a/core/src/main/java/org/apache/iceberg/InheritableMetadataFactory.java b/core/src/main/java/org/apache/iceberg/InheritableMetadataFactory.java
index 14bf2ff..384ea64 100644
--- a/core/src/main/java/org/apache/iceberg/InheritableMetadataFactory.java
+++ b/core/src/main/java/org/apache/iceberg/InheritableMetadataFactory.java
@@ -41,6 +41,7 @@ class InheritableMetadataFactory {
       this.snapshotId = snapshotId;
     }
 
+    @Override
     public ManifestEntry apply(ManifestEntry manifestEntry) {
       if (manifestEntry.snapshotId() == null) {
         manifestEntry.setSnapshotId(snapshotId);
@@ -53,6 +54,7 @@ class InheritableMetadataFactory {
 
     private EmptyInheritableMetadata() {}
 
+    @Override
     public ManifestEntry apply(ManifestEntry manifestEntry) {
       if (manifestEntry.snapshotId() == null) {
         throw new IllegalArgumentException("Entries must have explicit snapshot ids if inherited metadata is empty");
diff --git a/core/src/main/java/org/apache/iceberg/TableMetadataParser.java b/core/src/main/java/org/apache/iceberg/TableMetadataParser.java
index f98bf49..fd68338 100644
--- a/core/src/main/java/org/apache/iceberg/TableMetadataParser.java
+++ b/core/src/main/java/org/apache/iceberg/TableMetadataParser.java
@@ -30,6 +30,7 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
@@ -112,7 +113,8 @@ public class TableMetadataParser {
       TableMetadata metadata, OutputFile outputFile, boolean overwrite) {
     boolean isGzip = Codec.fromFileName(outputFile.location()) == Codec.GZIP;
     OutputStream stream = overwrite ? outputFile.createOrOverwrite() : outputFile.create();
-    try (OutputStreamWriter writer = new OutputStreamWriter(isGzip ? new GZIPOutputStream(stream) : stream)) {
+    try (OutputStream ou = isGzip ? new GZIPOutputStream(stream) : stream;
+         OutputStreamWriter writer = new OutputStreamWriter(ou, StandardCharsets.UTF_8)) {
       JsonGenerator generator = JsonUtil.factory().createGenerator(writer);
       generator.useDefaultPrettyPrinter();
       toJson(metadata, generator);
@@ -136,15 +138,14 @@ public class TableMetadataParser {
   }
 
   public static String toJson(TableMetadata metadata) {
-    StringWriter writer = new StringWriter();
-    try {
+    try (StringWriter writer = new StringWriter()) {
       JsonGenerator generator = JsonUtil.factory().createGenerator(writer);
       toJson(metadata, generator);
       generator.flush();
+      return writer.toString();
     } catch (IOException e) {
       throw new RuntimeIOException(e, "Failed to write json for: %s", metadata);
     }
-    return writer.toString();
   }
 
   private static void toJson(TableMetadata metadata, JsonGenerator generator) throws IOException {
diff --git a/core/src/main/java/org/apache/iceberg/hadoop/HadoopTableOperations.java b/core/src/main/java/org/apache/iceberg/hadoop/HadoopTableOperations.java
index b24fdbb..8bad148 100644
--- a/core/src/main/java/org/apache/iceberg/hadoop/HadoopTableOperations.java
+++ b/core/src/main/java/org/apache/iceberg/hadoop/HadoopTableOperations.java
@@ -262,7 +262,6 @@ public class HadoopTableOperations implements TableOperations {
 
     try (FSDataOutputStream out = fs.create(versionHintFile, true /* overwrite */)) {
       out.write(String.valueOf(versionToWrite).getBytes(StandardCharsets.UTF_8));
-
     } catch (IOException e) {
       LOG.warn("Failed to update version hint", e);
     }
@@ -276,7 +275,8 @@ public class HadoopTableOperations implements TableOperations {
         return 0;
       }
 
-      try (BufferedReader in = new BufferedReader(new InputStreamReader(fs.open(versionHintFile)))) {
+      try (InputStreamReader fsr = new InputStreamReader(fs.open(versionHintFile), StandardCharsets.UTF_8);
+           BufferedReader in = new BufferedReader(fsr)) {
         return Integer.parseInt(in.readLine().replace("\n", ""));
       }
 
diff --git a/data/src/main/java/org/apache/iceberg/data/IcebergGenerics.java b/data/src/main/java/org/apache/iceberg/data/IcebergGenerics.java
index 3dfcbac..311a80a 100644
--- a/data/src/main/java/org/apache/iceberg/data/IcebergGenerics.java
+++ b/data/src/main/java/org/apache/iceberg/data/IcebergGenerics.java
@@ -39,12 +39,10 @@ public class IcebergGenerics {
   }
 
   public static class ScanBuilder {
-    private final Table table;
     private TableScan tableScan;
     private boolean reuseContainers = false;
 
     public ScanBuilder(Table table) {
-      this.table = table;
       this.tableScan = table.newScan();
     }
 
diff --git a/data/src/main/java/org/apache/iceberg/data/orc/GenericOrcWriter.java b/data/src/main/java/org/apache/iceberg/data/orc/GenericOrcWriter.java
index a6648a9..78c6f0d 100644
--- a/data/src/main/java/org/apache/iceberg/data/orc/GenericOrcWriter.java
+++ b/data/src/main/java/org/apache/iceberg/data/orc/GenericOrcWriter.java
@@ -103,6 +103,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return Byte.class;
     }
 
+    @Override
     public void addValue(int rowId, Byte data, ColumnVector output) {
       if (data == null) {
         output.noNulls = false;
@@ -120,6 +121,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return Short.class;
     }
 
+    @Override
     public void addValue(int rowId, Short data, ColumnVector output) {
       if (data == null) {
         output.noNulls = false;
@@ -137,6 +139,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return Integer.class;
     }
 
+    @Override
     public void addValue(int rowId, Integer data, ColumnVector output) {
       if (data == null) {
         output.noNulls = false;
@@ -154,6 +157,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return Long.class;
     }
 
+    @Override
     public void addValue(int rowId, Long data, ColumnVector output) {
       if (data == null) {
         output.noNulls = false;
@@ -171,6 +175,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return Float.class;
     }
 
+    @Override
     public void addValue(int rowId, Float data, ColumnVector output) {
       if (data == null) {
         output.noNulls = false;
@@ -188,6 +193,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return Double.class;
     }
 
+    @Override
     public void addValue(int rowId, Double data, ColumnVector output) {
       if (data == null) {
         output.noNulls = false;
@@ -205,6 +211,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return String.class;
     }
 
+    @Override
     public void addValue(int rowId, String data, ColumnVector output) {
       if (data == null) {
         output.noNulls = false;
@@ -223,6 +230,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return byte[].class;
     }
 
+    @Override
     public void addValue(int rowId, byte[] data, ColumnVector output) {
       if (data == null) {
         output.noNulls = false;
@@ -242,6 +250,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return Long.class;
     }
 
+    @Override
     public void addValue(int rowId, Long data, ColumnVector output) {
       if (data == null) {
         output.noNulls = false;
@@ -257,11 +266,9 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
   }
 
   static class Decimal18Converter implements Converter<BigDecimal> {
-    private final int precision;
     private final int scale;
 
     Decimal18Converter(TypeDescription schema) {
-      this.precision = schema.getPrecision();
       this.scale = schema.getScale();
     }
 
@@ -270,6 +277,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return BigDecimal.class;
     }
 
+    @Override
     public void addValue(int rowId, BigDecimal data, ColumnVector output) {
       // TODO: validate precision and scale from schema
       if (data == null) {
@@ -284,12 +292,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
   }
 
   static class Decimal38Converter implements Converter<BigDecimal> {
-    private final int precision;
-    private final int scale;
-
     Decimal38Converter(TypeDescription schema) {
-      this.precision = schema.getPrecision();
-      this.scale = schema.getScale();
     }
 
     @Override
@@ -297,6 +300,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return BigDecimal.class;
     }
 
+    @Override
     public void addValue(int rowId, BigDecimal data, ColumnVector output) {
       // TODO: validate precision and scale from schema
       if (data == null) {
@@ -324,6 +328,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return Record.class;
     }
 
+    @Override
     @SuppressWarnings("unchecked")
     public void addValue(int rowId, Record data, ColumnVector output) {
       if (data == null) {
@@ -351,6 +356,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return List.class;
     }
 
+    @Override
     @SuppressWarnings("unchecked")
     public void addValue(int rowId, List data, ColumnVector output) {
       if (data == null) {
@@ -388,6 +394,7 @@ public class GenericOrcWriter implements OrcValueWriter<Record> {
       return Map.class;
     }
 
+    @Override
     @SuppressWarnings("unchecked")
     public void addValue(int rowId, Map data, ColumnVector output) {
       if (data == null) {
diff --git a/spark/src/main/java/org/apache/iceberg/spark/SparkSchemaUtil.java b/spark/src/main/java/org/apache/iceberg/spark/SparkSchemaUtil.java
index 2b9fa0d..58e785f 100644
--- a/spark/src/main/java/org/apache/iceberg/spark/SparkSchemaUtil.java
+++ b/spark/src/main/java/org/apache/iceberg/spark/SparkSchemaUtil.java
@@ -242,10 +242,6 @@ public class SparkSchemaUtil {
     return identitySpec(schema, names);
   }
 
-  private static PartitionSpec identitySpec(Schema schema, String... partitionNames) {
-    return identitySpec(schema, Lists.newArrayList(partitionNames));
-  }
-
   private static PartitionSpec identitySpec(Schema schema, List<String> partitionNames) {
     if (partitionNames == null || partitionNames.isEmpty()) {
       return null;
diff --git a/spark/src/main/java/org/apache/iceberg/spark/source/Writer.java b/spark/src/main/java/org/apache/iceberg/spark/source/Writer.java
index fdfc199..8f5b174 100644
--- a/spark/src/main/java/org/apache/iceberg/spark/source/Writer.java
+++ b/spark/src/main/java/org/apache/iceberg/spark/source/Writer.java
@@ -321,7 +321,6 @@ class Writer implements DataSourceWriter {
     private class OutputFileFactory {
       private final int partitionId;
       private final long taskId;
-      private final long epochId;
       // The purpose of this uuid is to be able to know from two paths that they were written by the same operation.
       // That's useful, for example, if a Spark job dies and leaves files in the file system, you can identify them all
       // with a recursive listing and grep.
@@ -331,7 +330,6 @@ class Writer implements DataSourceWriter {
       OutputFileFactory(int partitionId, long taskId, long epochId) {
         this.partitionId = partitionId;
         this.taskId = taskId;
-        this.epochId = epochId;
         this.fileCount = 0;
       }