You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@iceberg.apache.org by ja...@apache.org on 2021/11/05 17:28:54 UTC
[iceberg] branch master updated: Checkstyle: remove use of Parquet
Preconditions and Apache Commons IOUtils (#3472)
This is an automated email from the ASF dual-hosted git repository.
jackye pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/iceberg.git
The following commit(s) were added to refs/heads/master by this push:
new ce65722 Checkstyle: remove use of Parquet Preconditions and Apache Commons IOUtils (#3472)
ce65722 is described below
commit ce65722cc9f7d5ac6fc705c5c2a2c60d4d9ae8e8
Author: Jack Ye <yz...@amazon.com>
AuthorDate: Fri Nov 5 10:28:46 2021 -0700
Checkstyle: remove use of Parquet Preconditions and Apache Commons IOUtils (#3472)
---
.baseline/checkstyle/checkstyle.xml | 13 +++++++++++--
.../aliyun/oss/mock/AliyunOSSMockLocalController.java | 6 +++---
.../iceberg/aliyun/oss/mock/AliyunOSSMockLocalStore.java | 4 ++--
.../apache/iceberg/arrow/vectorized/ArrowBatchReader.java | 2 +-
.../arrow/vectorized/GenericArrowVectorAccessorFactory.java | 2 +-
.../src/main/java/org/apache/iceberg/data/DeleteFilter.java | 2 +-
.../org/apache/iceberg/data/parquet/BaseParquetWriter.java | 2 +-
.../java/org/apache/iceberg/parquet/ApplyNameMapping.java | 2 +-
spark/v2.4/.../spark/data/vectorized/ColumnarBatchReader.java | 2 +-
spark/v3.0/.../spark/data/vectorized/ColumnarBatchReader.java | 2 +-
spark/v3.1/.../spark/data/vectorized/ColumnarBatchReader.java | 2 +-
spark/v3.2/.../spark/data/vectorized/ColumnarBatchReader.java | 2 +-
12 files changed, 25 insertions(+), 16 deletions(-)
diff --git a/.baseline/checkstyle/checkstyle.xml b/.baseline/checkstyle/checkstyle.xml
index eff3082..6a8a37b 100644
--- a/.baseline/checkstyle/checkstyle.xml
+++ b/.baseline/checkstyle/checkstyle.xml
@@ -190,15 +190,20 @@
<message key="import.illegal" value="Use org.apache.iceberg.relocated.* classes from bundled-guava module instead."/>
</module>
<module name="IllegalImport">
- <property name="id" value="BanUnrelocatedAssertJClasses"/>
+ <property name="id" value="GuavaClassesInAssertJ"/>
<property name="illegalPkgs" value="org.assertj.core.util"/>
<message key="import.illegal" value="Use org.apache.iceberg.relocated.* classes from bundled-guava module instead."/>
</module>
<module name="IllegalImport">
- <property name="id" value="BanUnrelocatedArrowClasses"/>
+ <property name="id" value="GuavaClassesInArrow"/>
<property name="illegalPkgs" value="org.apache.arrow.util"/>
<message key="import.illegal" value="Use org.apache.iceberg.relocated.* classes from bundled-guava module instead."/>
</module>
+ <module name="IllegalImport">
+ <property name="id" value="GuavaClassesInParquet"/>
+ <property name="illegalClasses" value="org.apache.parquet.Preconditions"/>
+ <message key="import.illegal" value="Use Preconditions from bundled-guava module instead."/>
+ </module>
<module name="IllegalInstantiation"> <!-- Java Coding Guidelines: Never instantiate primitive types -->
<property name="classes" value="java.lang.Boolean"/>
<property name="classes" value="java.lang.Byte"/>
@@ -362,6 +367,10 @@
<property name="message" value="Prefer Guava''s [CharStreams,Files,Resources].toString to avoid charset/stream closing issues."/>
</module>
<module name="RegexpSinglelineJava">
+ <property name="format" value="\bIOUtils\.copy\("/>
+ <property name="message" value="Prefer Guava''s ByteStreams.copy to avoid charset/stream closing issues."/>
+ </module>
+ <module name="RegexpSinglelineJava">
<property name="format" value="static enum"/>
<property name="message" value="Redundant ''static'' modifier."/>
</module>
diff --git a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/mock/AliyunOSSMockLocalController.java b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/mock/AliyunOSSMockLocalController.java
index a9615f0..1c8539b 100644
--- a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/mock/AliyunOSSMockLocalController.java
+++ b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/mock/AliyunOSSMockLocalController.java
@@ -29,9 +29,9 @@ import java.io.OutputStream;
import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.io.IOUtils;
import org.apache.commons.io.input.BoundedInputStream;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
+import org.apache.iceberg.relocated.com.google.common.io.ByteStreams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -168,7 +168,7 @@ public class AliyunOSSMockLocalController {
try (OutputStream outputStream = response.getOutputStream()) {
try (FileInputStream fis = new FileInputStream(metadata.getDataFile())) {
fis.skip(skipSize);
- IOUtils.copy(new BoundedInputStream(fis, bytesToRead), outputStream);
+ ByteStreams.copy(new BoundedInputStream(fis, bytesToRead), outputStream);
}
}
} else {
@@ -180,7 +180,7 @@ public class AliyunOSSMockLocalController {
try (OutputStream outputStream = response.getOutputStream()) {
try (FileInputStream fis = new FileInputStream(metadata.getDataFile())) {
- IOUtils.copy(fis, outputStream);
+ ByteStreams.copy(fis, outputStream);
}
}
}
diff --git a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/mock/AliyunOSSMockLocalStore.java b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/mock/AliyunOSSMockLocalStore.java
index 8427be9..f9784a7 100644
--- a/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/mock/AliyunOSSMockLocalStore.java
+++ b/aliyun/src/test/java/org/apache/iceberg/aliyun/oss/mock/AliyunOSSMockLocalStore.java
@@ -37,10 +37,10 @@ import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
import org.apache.directory.api.util.Hex;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
+import org.apache.iceberg.relocated.com.google.common.io.ByteStreams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
@@ -93,7 +93,7 @@ public class AliyunOSSMockLocalStore {
private static void inputStreamToFile(InputStream inputStream, File targetFile) throws IOException {
try (OutputStream outputStream = new FileOutputStream(targetFile)) {
- IOUtils.copy(inputStream, outputStream);
+ ByteStreams.copy(inputStream, outputStream);
}
}
diff --git a/arrow/src/main/java/org/apache/iceberg/arrow/vectorized/ArrowBatchReader.java b/arrow/src/main/java/org/apache/iceberg/arrow/vectorized/ArrowBatchReader.java
index 375e314..9cb13eb 100644
--- a/arrow/src/main/java/org/apache/iceberg/arrow/vectorized/ArrowBatchReader.java
+++ b/arrow/src/main/java/org/apache/iceberg/arrow/vectorized/ArrowBatchReader.java
@@ -21,7 +21,7 @@ package org.apache.iceberg.arrow.vectorized;
import java.util.List;
import org.apache.iceberg.parquet.VectorizedReader;
-import org.apache.parquet.Preconditions;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
/**
* A collection of vectorized readers per column (in the expected read schema) and Arrow Vector holders. This class owns
diff --git a/arrow/src/main/java/org/apache/iceberg/arrow/vectorized/GenericArrowVectorAccessorFactory.java b/arrow/src/main/java/org/apache/iceberg/arrow/vectorized/GenericArrowVectorAccessorFactory.java
index 87b70a8..421b8fb 100644
--- a/arrow/src/main/java/org/apache/iceberg/arrow/vectorized/GenericArrowVectorAccessorFactory.java
+++ b/arrow/src/main/java/org/apache/iceberg/arrow/vectorized/GenericArrowVectorAccessorFactory.java
@@ -43,7 +43,7 @@ import org.apache.arrow.vector.VarCharVector;
import org.apache.arrow.vector.complex.ListVector;
import org.apache.arrow.vector.complex.StructVector;
import org.apache.arrow.vector.util.DecimalUtility;
-import org.apache.parquet.Preconditions;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.parquet.column.ColumnDescriptor;
import org.apache.parquet.column.Dictionary;
import org.apache.parquet.io.api.Binary;
diff --git a/data/src/main/java/org/apache/iceberg/data/DeleteFilter.java b/data/src/main/java/org/apache/iceberg/data/DeleteFilter.java
index 40a77a1..c4123d5 100644
--- a/data/src/main/java/org/apache/iceberg/data/DeleteFilter.java
+++ b/data/src/main/java/org/apache/iceberg/data/DeleteFilter.java
@@ -42,6 +42,7 @@ import org.apache.iceberg.io.CloseableIterable;
import org.apache.iceberg.io.InputFile;
import org.apache.iceberg.orc.ORC;
import org.apache.iceberg.parquet.Parquet;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
@@ -54,7 +55,6 @@ import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.Filter;
import org.apache.iceberg.util.StructLikeSet;
import org.apache.iceberg.util.StructProjection;
-import org.apache.parquet.Preconditions;
public abstract class DeleteFilter<T> {
private static final long DEFAULT_SET_FILTER_THRESHOLD = 100_000L;
diff --git a/parquet/src/main/java/org/apache/iceberg/data/parquet/BaseParquetWriter.java b/parquet/src/main/java/org/apache/iceberg/data/parquet/BaseParquetWriter.java
index bde92c7..ee8d7e0 100644
--- a/parquet/src/main/java/org/apache/iceberg/data/parquet/BaseParquetWriter.java
+++ b/parquet/src/main/java/org/apache/iceberg/data/parquet/BaseParquetWriter.java
@@ -31,8 +31,8 @@ import java.util.Optional;
import org.apache.iceberg.parquet.ParquetTypeVisitor;
import org.apache.iceberg.parquet.ParquetValueWriter;
import org.apache.iceberg.parquet.ParquetValueWriters;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
-import org.apache.parquet.Preconditions;
import org.apache.parquet.column.ColumnDescriptor;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.GroupType;
diff --git a/parquet/src/main/java/org/apache/iceberg/parquet/ApplyNameMapping.java b/parquet/src/main/java/org/apache/iceberg/parquet/ApplyNameMapping.java
index 4943a35..66e88c9 100644
--- a/parquet/src/main/java/org/apache/iceberg/parquet/ApplyNameMapping.java
+++ b/parquet/src/main/java/org/apache/iceberg/parquet/ApplyNameMapping.java
@@ -24,7 +24,7 @@ import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.iceberg.mapping.MappedField;
import org.apache.iceberg.mapping.NameMapping;
-import org.apache.parquet.Preconditions;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.PrimitiveType;
diff --git a/spark/v2.4/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java b/spark/v2.4/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java
index 9449746..f71a696 100644
--- a/spark/v2.4/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java
+++ b/spark/v2.4/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java
@@ -23,7 +23,7 @@ import java.util.List;
import org.apache.iceberg.arrow.vectorized.BaseBatchReader;
import org.apache.iceberg.arrow.vectorized.VectorizedArrowReader;
import org.apache.iceberg.parquet.VectorizedReader;
-import org.apache.parquet.Preconditions;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.spark.sql.vectorized.ColumnVector;
import org.apache.spark.sql.vectorized.ColumnarBatch;
diff --git a/spark/v3.0/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java b/spark/v3.0/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java
index 9449746..f71a696 100644
--- a/spark/v3.0/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java
+++ b/spark/v3.0/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java
@@ -23,7 +23,7 @@ import java.util.List;
import org.apache.iceberg.arrow.vectorized.BaseBatchReader;
import org.apache.iceberg.arrow.vectorized.VectorizedArrowReader;
import org.apache.iceberg.parquet.VectorizedReader;
-import org.apache.parquet.Preconditions;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.spark.sql.vectorized.ColumnVector;
import org.apache.spark.sql.vectorized.ColumnarBatch;
diff --git a/spark/v3.1/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java b/spark/v3.1/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java
index 9449746..f71a696 100644
--- a/spark/v3.1/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java
+++ b/spark/v3.1/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java
@@ -23,7 +23,7 @@ import java.util.List;
import org.apache.iceberg.arrow.vectorized.BaseBatchReader;
import org.apache.iceberg.arrow.vectorized.VectorizedArrowReader;
import org.apache.iceberg.parquet.VectorizedReader;
-import org.apache.parquet.Preconditions;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.spark.sql.vectorized.ColumnVector;
import org.apache.spark.sql.vectorized.ColumnarBatch;
diff --git a/spark/v3.2/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java b/spark/v3.2/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java
index 9449746..f71a696 100644
--- a/spark/v3.2/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java
+++ b/spark/v3.2/spark/src/main/java/org/apache/iceberg/spark/data/vectorized/ColumnarBatchReader.java
@@ -23,7 +23,7 @@ import java.util.List;
import org.apache.iceberg.arrow.vectorized.BaseBatchReader;
import org.apache.iceberg.arrow.vectorized.VectorizedArrowReader;
import org.apache.iceberg.parquet.VectorizedReader;
-import org.apache.parquet.Preconditions;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.spark.sql.vectorized.ColumnVector;
import org.apache.spark.sql.vectorized.ColumnarBatch;