Posted to commits@kafka.apache.org by ij...@apache.org on 2018/01/01 13:07:35 UTC

[kafka] branch trunk updated: Replace Arrays.asList with Collections.singletonList where possible (#4368)

This is an automated email from the ASF dual-hosted git repository.

ijuma pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/kafka.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 96df935  Replace Arrays.asList with Collections.singletonList where possible (#4368)
96df935 is described below

commit 96df93522f84173ff47f47ec78ec408991140b65
Author: Koen De Groote <kd...@gmail.com>
AuthorDate: Mon Jan 1 14:07:32 2018 +0100

    Replace Arrays.asList with Collections.singletonList where possible (#4368)
    
    Reviewers: Ismael Juma <is...@juma.me.uk>
---
 .../apache/kafka/connect/data/ConnectSchema.java   | 30 +++++++++++-----------
 .../connect/storage/OffsetStorageReaderImpl.java   |  3 ++-
 .../kafka/connect/tools/SchemaSourceTask.java      |  3 +--
 .../kafka/connect/tools/VerifiableSourceTask.java  |  3 +--
 4 files changed, 19 insertions(+), 20 deletions(-)
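For context on why this substitution is worthwhile: the two factory methods differ in more than length. Arrays.asList allocates a varargs array and wraps it in a fixed-size list whose slots can still be overwritten with set(), while Collections.singletonList stores the element in a single field and is fully immutable. A minimal sketch of the difference, using only the JDK (variable names are illustrative, not from the commit):

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class SingletonListDemo {
        public static void main(String[] args) {
            // Arrays.asList allocates a one-element varargs array and wraps it;
            // the resulting list is fixed-size but its slot is still writable.
            List<String> viaAsList = Arrays.asList("a");
            viaAsList.set(0, "b");                    // allowed

            // Collections.singletonList keeps the element in a field: no backing
            // array is allocated, and every mutator throws.
            List<String> viaSingleton = Collections.singletonList("a");
            try {
                viaSingleton.set(0, "b");             // UnsupportedOperationException
            } catch (UnsupportedOperationException expected) {
                System.out.println("singletonList is immutable");
            }
        }
    }

So for the single-element cases below, singletonList is both cheaper (no array allocation) and safer (immutability is enforced).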

diff --git a/connect/api/src/main/java/org/apache/kafka/connect/data/ConnectSchema.java b/connect/api/src/main/java/org/apache/kafka/connect/data/ConnectSchema.java
index f90e635..6845057 100644
--- a/connect/api/src/main/java/org/apache/kafka/connect/data/ConnectSchema.java
+++ b/connect/api/src/main/java/org/apache/kafka/connect/data/ConnectSchema.java
@@ -44,31 +44,31 @@ public class ConnectSchema implements Schema {
     private static final Map<Class<?>, Type> JAVA_CLASS_SCHEMA_TYPES = new HashMap<>();
 
     static {
-        SCHEMA_TYPE_CLASSES.put(Type.INT8, Arrays.asList((Class) Byte.class));
-        SCHEMA_TYPE_CLASSES.put(Type.INT16, Arrays.asList((Class) Short.class));
-        SCHEMA_TYPE_CLASSES.put(Type.INT32, Arrays.asList((Class) Integer.class));
-        SCHEMA_TYPE_CLASSES.put(Type.INT64, Arrays.asList((Class) Long.class));
-        SCHEMA_TYPE_CLASSES.put(Type.FLOAT32, Arrays.asList((Class) Float.class));
-        SCHEMA_TYPE_CLASSES.put(Type.FLOAT64, Arrays.asList((Class) Double.class));
-        SCHEMA_TYPE_CLASSES.put(Type.BOOLEAN, Arrays.asList((Class) Boolean.class));
-        SCHEMA_TYPE_CLASSES.put(Type.STRING, Arrays.asList((Class) String.class));
+        SCHEMA_TYPE_CLASSES.put(Type.INT8, Collections.singletonList((Class) Byte.class));
+        SCHEMA_TYPE_CLASSES.put(Type.INT16, Collections.singletonList((Class) Short.class));
+        SCHEMA_TYPE_CLASSES.put(Type.INT32, Collections.singletonList((Class) Integer.class));
+        SCHEMA_TYPE_CLASSES.put(Type.INT64, Collections.singletonList((Class) Long.class));
+        SCHEMA_TYPE_CLASSES.put(Type.FLOAT32, Collections.singletonList((Class) Float.class));
+        SCHEMA_TYPE_CLASSES.put(Type.FLOAT64, Collections.singletonList((Class) Double.class));
+        SCHEMA_TYPE_CLASSES.put(Type.BOOLEAN, Collections.singletonList((Class) Boolean.class));
+        SCHEMA_TYPE_CLASSES.put(Type.STRING, Collections.singletonList((Class) String.class));
         // Bytes are special and have 2 representations. byte[] causes problems because it doesn't handle equals() and
         // hashCode() like we want objects to, so we support both byte[] and ByteBuffer. Using plain byte[] can cause
         // those methods to fail, so ByteBuffers are recommended
         SCHEMA_TYPE_CLASSES.put(Type.BYTES, Arrays.asList((Class) byte[].class, (Class) ByteBuffer.class));
-        SCHEMA_TYPE_CLASSES.put(Type.ARRAY, Arrays.asList((Class) List.class));
-        SCHEMA_TYPE_CLASSES.put(Type.MAP, Arrays.asList((Class) Map.class));
-        SCHEMA_TYPE_CLASSES.put(Type.STRUCT, Arrays.asList((Class) Struct.class));
+        SCHEMA_TYPE_CLASSES.put(Type.ARRAY, Collections.singletonList((Class) List.class));
+        SCHEMA_TYPE_CLASSES.put(Type.MAP, Collections.singletonList((Class) Map.class));
+        SCHEMA_TYPE_CLASSES.put(Type.STRUCT, Collections.singletonList((Class) Struct.class));
 
         for (Map.Entry<Type, List<Class>> schemaClasses : SCHEMA_TYPE_CLASSES.entrySet()) {
             for (Class<?> schemaClass : schemaClasses.getValue())
                 JAVA_CLASS_SCHEMA_TYPES.put(schemaClass, schemaClasses.getKey());
         }
 
-        LOGICAL_TYPE_CLASSES.put(Decimal.LOGICAL_NAME, Arrays.asList((Class) BigDecimal.class));
-        LOGICAL_TYPE_CLASSES.put(Date.LOGICAL_NAME, Arrays.asList((Class) java.util.Date.class));
-        LOGICAL_TYPE_CLASSES.put(Time.LOGICAL_NAME, Arrays.asList((Class) java.util.Date.class));
-        LOGICAL_TYPE_CLASSES.put(Timestamp.LOGICAL_NAME, Arrays.asList((Class) java.util.Date.class));
+        LOGICAL_TYPE_CLASSES.put(Decimal.LOGICAL_NAME, Collections.singletonList((Class) BigDecimal.class));
+        LOGICAL_TYPE_CLASSES.put(Date.LOGICAL_NAME, Collections.singletonList((Class) java.util.Date.class));
+        LOGICAL_TYPE_CLASSES.put(Time.LOGICAL_NAME, Collections.singletonList((Class) java.util.Date.class));
+        LOGICAL_TYPE_CLASSES.put(Timestamp.LOGICAL_NAME, Collections.singletonList((Class) java.util.Date.class));
         // We don't need to put these into JAVA_CLASS_SCHEMA_TYPES since that's only used to determine schemas for
         // schemaless data and logical types will have ambiguous schemas (e.g. many of them use the same Java class) so
         // they should not be used without schemas.
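The comment above about Type.BYTES is worth unpacking, since it explains the one spot where Arrays.asList deliberately remains: Java arrays inherit equals() and hashCode() from Object, so two byte[] values with identical contents are neither equal nor guaranteed to hash alike, whereas ByteBuffer compares and hashes by content. A small sketch, assuming nothing beyond the JDK:

    import java.nio.ByteBuffer;

    public class BytesEqualityDemo {
        public static void main(String[] args) {
            byte[] a = {1, 2, 3};
            byte[] b = {1, 2, 3};

            // Arrays use identity semantics inherited from Object.
            System.out.println(a.equals(b));                   // false
            System.out.println(a.hashCode() == b.hashCode());  // false (almost certainly)

            // ByteBuffer compares the remaining bytes by content.
            System.out.println(ByteBuffer.wrap(a).equals(ByteBuffer.wrap(b))); // true
        }
    }

That is why BYTES keeps a genuine two-element Arrays.asList of byte[].class and ByteBuffer.class, and why the comment steers users toward ByteBuffer.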
diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/storage/OffsetStorageReaderImpl.java b/connect/runtime/src/main/java/org/apache/kafka/connect/storage/OffsetStorageReaderImpl.java
index 53cc1a3..9f926dc 100644
--- a/connect/runtime/src/main/java/org/apache/kafka/connect/storage/OffsetStorageReaderImpl.java
+++ b/connect/runtime/src/main/java/org/apache/kafka/connect/storage/OffsetStorageReaderImpl.java
@@ -24,6 +24,7 @@ import org.slf4j.LoggerFactory;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -50,7 +51,7 @@ public class OffsetStorageReaderImpl implements OffsetStorageReader {
 
     @Override
     public <T> Map<String, Object> offset(Map<String, T> partition) {
-        return offsets(Arrays.asList(partition)).get(partition);
+        return offsets(Collections.singletonList(partition)).get(partition);
     }
 
     @Override
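The offset(partition) convenience changed above is just the single-partition form of the bulk offsets(Collection) call, which is why wrapping the argument in a singletonList suffices. From a source task it is typically reached through the task context; a hedged usage sketch (the "filename" and "seqno" keys and the "file" property are illustrative assumptions, not from this commit):

    import java.util.Collections;
    import java.util.Map;

    import org.apache.kafka.connect.source.SourceTask;

    public abstract class OffsetLookupSketch extends SourceTask {
        @Override
        public void start(Map<String, String> props) {
            // Partition identity is an arbitrary map chosen by the connector.
            Map<String, String> partition =
                    Collections.singletonMap("filename", props.get("file"));

            // offset(...) delegates to offsets(Collections.singletonList(partition))
            // and returns null when no offset has been committed yet.
            Map<String, Object> lastOffset = context.offsetStorageReader().offset(partition);
            if (lastOffset != null) {
                Long seqno = (Long) lastOffset.get("seqno");  // resume point, if any
                // ... seek to seqno ...
            }
        }
    }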
diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/tools/SchemaSourceTask.java b/connect/runtime/src/main/java/org/apache/kafka/connect/tools/SchemaSourceTask.java
index 6a51b52..e87851c 100644
--- a/connect/runtime/src/main/java/org/apache/kafka/connect/tools/SchemaSourceTask.java
+++ b/connect/runtime/src/main/java/org/apache/kafka/connect/tools/SchemaSourceTask.java
@@ -27,7 +27,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -154,7 +153,7 @@ public class SchemaSourceTask extends SourceTask {
             }
 
             System.out.println("{\"task\": " + id + ", \"seqno\": " + seqno + "}");
-            List<SourceRecord> result = Arrays.asList(srcRecord);
+            List<SourceRecord> result = Collections.singletonList(srcRecord);
             seqno++;
             count++;
             return result;
diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/tools/VerifiableSourceTask.java b/connect/runtime/src/main/java/org/apache/kafka/connect/tools/VerifiableSourceTask.java
index dfd8bac..c7ba96d 100644
--- a/connect/runtime/src/main/java/org/apache/kafka/connect/tools/VerifiableSourceTask.java
+++ b/connect/runtime/src/main/java/org/apache/kafka/connect/tools/VerifiableSourceTask.java
@@ -26,7 +26,6 @@ import org.apache.kafka.connect.source.SourceTask;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -115,7 +114,7 @@ public class VerifiableSourceTask extends SourceTask {
 
         Map<String, Long> ccOffset = Collections.singletonMap(SEQNO_FIELD, seqno);
         SourceRecord srcRecord = new SourceRecord(partition, ccOffset, topic, Schema.INT32_SCHEMA, id, Schema.INT64_SCHEMA, seqno);
-        List<SourceRecord> result = Arrays.asList(srcRecord);
+        List<SourceRecord> result = Collections.singletonList(srcRecord);
         seqno++;
         return result;
     }
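Both tool tasks above follow the same poll() shape after this change: build one SourceRecord, wrap it in an immutable one-element list, return it. A condensed, compilable sketch of that pattern against the Connect API (topic, partition, and offset field names are illustrative):

    import java.util.Collections;
    import java.util.List;
    import java.util.Map;

    import org.apache.kafka.connect.data.Schema;
    import org.apache.kafka.connect.source.SourceRecord;
    import org.apache.kafka.connect.source.SourceTask;

    public abstract class SingleRecordPollSketch extends SourceTask {
        private long seqno = 0;

        @Override
        public List<SourceRecord> poll() {
            Map<String, ?> partition = Collections.singletonMap("task.id", "0"); // illustrative
            Map<String, ?> offset = Collections.singletonMap("seqno", seqno);

            SourceRecord record = new SourceRecord(partition, offset, "demo-topic",
                    Schema.INT64_SCHEMA, seqno);

            seqno++;
            // One record per poll, so singletonList: no varargs array needed.
            return Collections.singletonList(record);
        }
    }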

-- 
To stop receiving notification emails like this one, please contact
['"commits@kafka.apache.org" <co...@kafka.apache.org>'].