You are viewing a plain text version of this content. The canonical (hyperlinked) version is available in the mailing list archive.
Posted to commits@iceberg.apache.org by bl...@apache.org on 2019/06/22 23:03:30 UTC
[incubator-iceberg] branch master updated: Use JDK StandardCharsets
instead of commons-io (#230)
This is an automated email from the ASF dual-hosted git repository.
blue pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-iceberg.git
The following commit(s) were added to refs/heads/master by this push:
new 2bdc6de Use JDK StandardCharsets instead of commons-io (#230)
2bdc6de is described below
commit 2bdc6debeb1cdcbd44f5c1d20cc40a1a61e9cd4d
Author: David Phillips <da...@acz.org>
AuthorDate: Sat Jun 22 16:03:26 2019 -0700
Use JDK StandardCharsets instead of commons-io (#230)
---
.../java/org/apache/iceberg/parquet/ParquetConversions.java | 4 ++--
.../apache/iceberg/parquet/TestMetricsRowGroupFilterTypes.java | 10 +++++-----
.../test/java/org/apache/iceberg/parquet/TestParquetUtil.java | 4 ++--
3 files changed, 9 insertions(+), 9 deletions(-)
diff --git a/parquet/src/main/java/org/apache/iceberg/parquet/ParquetConversions.java b/parquet/src/main/java/org/apache/iceberg/parquet/ParquetConversions.java
index 251d23f..431c636 100644
--- a/parquet/src/main/java/org/apache/iceberg/parquet/ParquetConversions.java
+++ b/parquet/src/main/java/org/apache/iceberg/parquet/ParquetConversions.java
@@ -22,9 +22,9 @@ package org.apache.iceberg.parquet;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.util.UUID;
import java.util.function.Function;
-import org.apache.commons.io.Charsets;
import org.apache.iceberg.expressions.Literal;
import org.apache.iceberg.types.Type;
import org.apache.parquet.io.api.Binary;
@@ -73,7 +73,7 @@ class ParquetConversions {
switch (type.getOriginalType()) {
case UTF8:
// decode to CharSequence to avoid copying into a new String
- return binary -> Charsets.UTF_8.decode(((Binary) binary).toByteBuffer());
+ return binary -> StandardCharsets.UTF_8.decode(((Binary) binary).toByteBuffer());
case DECIMAL:
int scale = type.getDecimalMetadata().getScale();
switch (type.getPrimitiveTypeName()) {
diff --git a/parquet/src/test/java/org/apache/iceberg/parquet/TestMetricsRowGroupFilterTypes.java b/parquet/src/test/java/org/apache/iceberg/parquet/TestMetricsRowGroupFilterTypes.java
index 8c9f141..03d876c 100644
--- a/parquet/src/test/java/org/apache/iceberg/parquet/TestMetricsRowGroupFilterTypes.java
+++ b/parquet/src/test/java/org/apache/iceberg/parquet/TestMetricsRowGroupFilterTypes.java
@@ -23,11 +23,11 @@ import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.util.UUID;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericFixed;
import org.apache.avro.generic.GenericRecordBuilder;
-import org.apache.commons.io.Charsets;
import org.apache.iceberg.Files;
import org.apache.iceberg.Schema;
import org.apache.iceberg.expressions.Literal;
@@ -111,7 +111,7 @@ public class TestMetricsRowGroupFilterTypes {
.to(TimestampType.withoutZone()).value();
private static final GenericFixed fixed = new GenericData.Fixed(
org.apache.avro.Schema.createFixed("_fixed", null, null, 4),
- "abcd".getBytes(Charsets.UTF_8));
+ "abcd".getBytes(StandardCharsets.UTF_8));
@BeforeClass
public static void createInputFile() throws IOException {
@@ -138,7 +138,7 @@ public class TestMetricsRowGroupFilterTypes {
builder.set("_string", "tapir");
builder.set("_uuid", uuid);
builder.set("_fixed", fixed);
- builder.set("_binary", ByteBuffer.wrap("xyz".getBytes(Charsets.UTF_8)));
+ builder.set("_binary", ByteBuffer.wrap("xyz".getBytes(StandardCharsets.UTF_8)));
builder.set("_int_decimal", new BigDecimal("77.77"));
builder.set("_long_decimal", new BigDecimal("88.88"));
builder.set("_fixed_decimal", new BigDecimal("99.99"));
@@ -178,8 +178,8 @@ public class TestMetricsRowGroupFilterTypes {
"2018-06-29T10:02:34.000000-07:00" },
new Object[] { "string", "tapir", "monthly" },
// new Object[] { "uuid", uuid, UUID.randomUUID() }, // not supported yet
- new Object[] { "fixed", "abcd".getBytes(Charsets.UTF_8), new byte[] { 0, 1, 2, 3 } },
- new Object[] { "binary", "xyz".getBytes(Charsets.UTF_8), new byte[] { 0, 1, 2, 3, 4, 5 } },
+ new Object[] { "fixed", "abcd".getBytes(StandardCharsets.UTF_8), new byte[] { 0, 1, 2, 3 } },
+ new Object[] { "binary", "xyz".getBytes(StandardCharsets.UTF_8), new byte[] { 0, 1, 2, 3, 4, 5 } },
new Object[] { "int_decimal", "77.77", "12.34" },
new Object[] { "long_decimal", "88.88", "12.34" },
new Object[] { "fixed_decimal", "99.99", "12.34" },
diff --git a/parquet/src/test/java/org/apache/iceberg/parquet/TestParquetUtil.java b/parquet/src/test/java/org/apache/iceberg/parquet/TestParquetUtil.java
index 1b681d3..e75c247 100644
--- a/parquet/src/test/java/org/apache/iceberg/parquet/TestParquetUtil.java
+++ b/parquet/src/test/java/org/apache/iceberg/parquet/TestParquetUtil.java
@@ -26,12 +26,12 @@ import java.io.IOException;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
+import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.UUID;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericData.Record;
import org.apache.avro.generic.GenericFixed;
-import org.apache.commons.io.Charsets;
import org.apache.iceberg.Metrics;
import org.apache.iceberg.Schema;
import org.apache.iceberg.avro.AvroSchemaUtil;
@@ -64,7 +64,7 @@ public class TestParquetUtil extends BaseParquetWritingTest {
private final UUID uuid = UUID.randomUUID();
private final GenericFixed fixed = new GenericData.Fixed(
org.apache.avro.Schema.createFixed("fixedCol", null, null, 4),
- "abcd".getBytes(Charsets.UTF_8));
+ "abcd".getBytes(StandardCharsets.UTF_8));
@Test
public void testMetricsForTopLevelFields() throws IOException {