Posted to common-commits@hadoop.apache.org by je...@apache.org on 2020/07/15 16:01:58 UTC

[hadoop] branch branch-3.1 updated: HADOOP-17101. Replace Guava Function with Java8+ Function

This is an automated email from the ASF dual-hosted git repository.

jeagles pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-3.1 by this push:
     new 34a6cbb  HADOOP-17101. Replace Guava Function with Java8+ Function
34a6cbb is described below

commit 34a6cbb5bbedf53d618f3b3e3522f80e418e476c
Author: Ahmed Hussein <ah...@apache.org>
AuthorDate: Wed Jul 15 09:53:18 2020 -0500

    HADOOP-17101. Replace Guava Function with Java8+ Function
    
    Signed-off-by: Jonathan Eagles <je...@gmail.com>
    (cherry picked from commit 98fcffe93f9ef910654574f69591fcdc621523af)
    (cherry picked from commit 43a865dc07437e0f767a902c4cfe6ca0472bf57f)
---
 .../src/main/resources/checkstyle/checkstyle.xml   |  7 +++++-
 .../hdfs/server/blockmanagement/HostSet.java       | 25 ++++++++++----------
 .../hadoop/hdfs/server/namenode/JournalSet.java    | 22 +++++++++---------
 .../hadoop/hdfs/server/protocol/RemoteEditLog.java | 11 ++++-----
 .../hadoop/hdfs/server/namenode/ha/HATestUtil.java | 17 +++++---------
 .../mapreduce/lib/input/TestFileInputFormat.java   | 27 ++++++++--------------
 .../impl/pb/GetApplicationsRequestPBImpl.java      | 11 ++-------
 7 files changed, 50 insertions(+), 70 deletions(-)
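
The mechanical pattern behind each hunk below: an anonymous subclass of Guava's com.google.common.base.Function becomes a lambda (or method reference) against java.util.function.Function, or disappears entirely where a plain loop or stream is simpler. A minimal before/after sketch of the rewrite, with illustrative names that are not from the patch:

    // Illustrative only -- not part of the patch.
    public class FunctionMigrationSketch {
      // Before: Guava's interface forces an anonymous inner class.
      static final com.google.common.base.Function<Integer, String> GUAVA_FN =
          new com.google.common.base.Function<Integer, String>() {
            @Override
            public String apply(Integer input) {
              return "txid-" + input;
            }
          };

      // After: the JDK interface takes a lambda and drops the Guava dependency.
      static final java.util.function.Function<Integer, String> JDK_FN =
          input -> "txid-" + input;
    }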

diff --git a/hadoop-build-tools/src/main/resources/checkstyle/checkstyle.xml b/hadoop-build-tools/src/main/resources/checkstyle/checkstyle.xml
index 8f3d3f1..e0a55f7 100644
--- a/hadoop-build-tools/src/main/resources/checkstyle/checkstyle.xml
+++ b/hadoop-build-tools/src/main/resources/checkstyle/checkstyle.xml
@@ -119,7 +119,12 @@
 
         <!-- Checks for imports                              -->
         <!-- See http://checkstyle.sf.net/config_import.html -->
-        <module name="IllegalImport"/> <!-- defaults to sun.* packages -->
+        <module name="IllegalImport">
+          <property name="regexp" value="true"/>
+          <property name="illegalPkgs" value="^sun\.[^.]+"/>
+          <property name="illegalClasses"
+            value="^com\.google\.common\.base\.(Optional|Function), ^com\.google\.common\.collect\.(ImmutableListMultimap)"/>
+        </module>
         <module name="RedundantImport"/>
         <module name="UnusedImports"/>
 
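With regexp set to true, illegalPkgs and illegalClasses are matched as patterns rather than literal package prefixes, so the rule rejects direct sun.* subpackages and the named Guava classes while leaving the rest of Guava alone. Roughly, assuming this checkstyle config is active, imports like the first group below would now be flagged while the JDK replacements pass:

    // Flagged by the IllegalImport patterns above (illustrative):
    //   import sun.misc.Unsafe;
    //   import com.google.common.base.Function;
    //   import com.google.common.base.Optional;
    //   import com.google.common.collect.ImmutableListMultimap;

    // Unaffected JDK equivalents:
    import java.util.Optional;
    import java.util.function.Function;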
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostSet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostSet.java
index 958557b..16f67ee 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostSet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/HostSet.java
@@ -17,15 +17,12 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
+
 import com.google.common.base.Preconditions;
 import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Iterators;
 import com.google.common.collect.Multimap;
 import com.google.common.collect.UnmodifiableIterator;
 
-import javax.annotation.Nullable;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.util.Collection;
@@ -101,14 +98,16 @@ public class HostSet implements Iterable<InetSocketAddress> {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder("HostSet(");
-    Joiner.on(",").appendTo(sb, Iterators.transform(iterator(),
-        new Function<InetSocketAddress, String>() {
-          @Override
-          public String apply(@Nullable InetSocketAddress addr) {
-            assert addr != null;
-            return addr.getAddress().getHostAddress() + ":" + addr.getPort();
-          }
-        }));
-    return sb.append(")").toString();
+    Iterator<InetSocketAddress> iter = iterator();
+    String sep = "";
+    while (iter.hasNext()) {
+      InetSocketAddress addr = iter.next();
+      sb.append(sep);
+      sb.append(addr.getAddress().getHostAddress());
+      sb.append(':');
+      sb.append(addr.getPort());
+      sep = ",";
+    }
+    return sb.append(')').toString();
   }
 }
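
The explicit loop above keeps the replacement dependency-free; a stream-based form would be equivalent. A sketch for comparison only (not what the patch uses), relying on HostSet implementing Iterable<InetSocketAddress>:

    // Hypothetical alternative to the loop in HostSet#toString.
    // Needs java.util.stream.{Collectors, StreamSupport}.
    String body = StreamSupport.stream(hostSet.spliterator(), false)
        .map(addr -> addr.getAddress().getHostAddress() + ":" + addr.getPort())
        .collect(Collectors.joining(","));
    String repr = "HostSet(" + body + ")";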
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
index e7f2adb..4b50531 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java
@@ -24,8 +24,10 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 import java.util.PriorityQueue;
 import java.util.SortedSet;
 import java.util.concurrent.CopyOnWriteArrayList;
@@ -38,12 +40,8 @@ import org.apache.hadoop.hdfs.server.common.StorageInfo;
 import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
-
 import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableListMultimap;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Multimaps;
 import com.google.common.collect.Sets;
 
 /**
@@ -628,7 +626,7 @@ public class JournalSet implements JournalManager {
    */
   public synchronized RemoteEditLogManifest getEditLogManifest(long fromTxId) {
     // Collect RemoteEditLogs available from each FileJournalManager
-    List<RemoteEditLog> allLogs = Lists.newArrayList();
+    List<RemoteEditLog> allLogs = new ArrayList<>();
     for (JournalAndStream j : journals) {
       if (j.getManager() instanceof FileJournalManager) {
         FileJournalManager fjm = (FileJournalManager)j.getManager();
@@ -639,15 +637,17 @@ public class JournalSet implements JournalManager {
         }
       }
     }
-    
     // Group logs by their starting txid
-    ImmutableListMultimap<Long, RemoteEditLog> logsByStartTxId =
-      Multimaps.index(allLogs, RemoteEditLog.GET_START_TXID);
+    final Map<Long, List<RemoteEditLog>> logsByStartTxId = new HashMap<>();
+    allLogs.forEach(input -> {
+      long key = RemoteEditLog.GET_START_TXID.apply(input);
+      logsByStartTxId.computeIfAbsent(key, k-> new ArrayList<>()).add(input);
+    });
     long curStartTxId = fromTxId;
-
-    List<RemoteEditLog> logs = Lists.newArrayList();
+    List<RemoteEditLog> logs = new ArrayList<>();
     while (true) {
-      ImmutableList<RemoteEditLog> logGroup = logsByStartTxId.get(curStartTxId);
+      List<RemoteEditLog> logGroup =
+          logsByStartTxId.getOrDefault(curStartTxId, Collections.emptyList());
       if (logGroup.isEmpty()) {
         // we have a gap in logs - for example because we recovered some old
         // storage directory with ancient logs. Clear out any logs we've
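
Since GET_START_TXID is now a java.util.function.Function, the hand-rolled computeIfAbsent grouping above could equally be written with Collectors.groupingBy; a sketch, not part of the patch:

    // Hypothetical one-liner for the grouping step.
    // Needs java.util.stream.Collectors.
    Map<Long, List<RemoteEditLog>> logsByStartTxId = allLogs.stream()
        .collect(Collectors.groupingBy(RemoteEditLog.GET_START_TXID));

Either way, getOrDefault supplies the empty-group behavior that the ImmutableListMultimap used to provide for free.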
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/RemoteEditLog.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/RemoteEditLog.java
index 1d26bc4..427daf1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/RemoteEditLog.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/RemoteEditLog.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hdfs.server.protocol;
 
-import com.google.common.base.Function;
 import com.google.common.collect.ComparisonChain;
+import java.util.function.Function;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 
 public class RemoteEditLog implements Comparable<RemoteEditLog> {
@@ -82,16 +82,13 @@ public class RemoteEditLog implements Comparable<RemoteEditLog> {
   }
   
   /**
-   * Guava <code>Function</code> which applies {@link #getStartTxId()} 
+   * Java <code>Function</code> which applies {@link #getStartTxId()}
    */
   public static final Function<RemoteEditLog, Long> GET_START_TXID =
-    new Function<RemoteEditLog, Long>() {
-      @Override
-      public Long apply(RemoteEditLog log) {
+      log -> {
         if (null == log) {
           return HdfsServerConstants.INVALID_TXID;
         }
         return log.getStartTxId();
-      }
-    };
+      };
 }
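
The lambda deliberately keeps the null check rather than collapsing to the method reference RemoteEditLog::getStartTxId, which would throw on a null log. A hypothetical usage sketch of the preserved behavior:

    // The function still tolerates null inputs, as the Guava version did:
    Long startTxId = RemoteEditLog.GET_START_TXID.apply(null);
    // startTxId == HdfsServerConstants.INVALID_TXID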
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java
index b932746..daf7930 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/HATestUtil.java
@@ -37,11 +37,10 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.LongAccumulator;
 
-import com.google.common.base.Function;
 import com.google.common.base.Joiner;
-import com.google.common.collect.Iterables;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.ClientGSIContext;
@@ -303,15 +302,11 @@ public abstract class HATestUtil {
   public static <P extends FailoverProxyProvider<?>> void
       setFailoverConfigurations(Configuration conf, String logicalName,
       List<InetSocketAddress> nnAddresses, Class<P> classFPP) {
-    setFailoverConfigurations(conf, logicalName,
-        Iterables.transform(nnAddresses, new Function<InetSocketAddress, String>() {
-
-          // transform the inet address to a simple string
-          @Override
-          public String apply(InetSocketAddress addr) {
-            return "hdfs://" + addr.getHostName() + ":" + addr.getPort();
-          }
-        }), classFPP);
+    final List<String> addresses = new ArrayList();
+    nnAddresses.forEach(
+        addr -> addresses.add(
+            "hdfs://" + addr.getHostName() + ":" + addr.getPort()));
+    setFailoverConfigurations(conf, logicalName, addresses, classFPP);
   }
 
   public static <P extends FailoverProxyProvider<?>>
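
A stream pipeline expresses the same transformation a little more compactly, and sidesteps the raw new ArrayList() above, which draws an unchecked-assignment warning. A sketch for comparison, not what the patch uses:

    // Hypothetical stream equivalent of the forEach above.
    // Needs java.util.stream.Collectors.
    List<String> addresses = nnAddresses.stream()
        .map(addr -> "hdfs://" + addr.getHostName() + ":" + addr.getPort())
        .collect(Collectors.toList());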
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java
index 3897a9b..104aacc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java
@@ -23,8 +23,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 import java.util.Set;
-
-import javax.annotation.Nullable;
+import java.util.stream.Collectors;
 
 import org.junit.Assert;
 
@@ -49,8 +48,6 @@ import org.junit.runners.Parameterized.Parameters;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
@@ -353,13 +350,10 @@ public class TestFileInputFormat {
       List<FileStatus> fetchedStatuses, final FileSystem localFs) {
     Assert.assertEquals(expectedPaths.size(), fetchedStatuses.size());
 
-    Iterable<Path> fqExpectedPaths = Iterables.transform(expectedPaths,
-        new Function<Path, Path>() {
-          @Override
-          public Path apply(Path input) {
-            return localFs.makeQualified(input);
-          }
-        });
+    Iterable<Path> fqExpectedPaths =
+        expectedPaths.stream().map(
+            input -> localFs.makeQualified(input)).collect(Collectors.toList());
+
 
     Set<Path> expectedPathSet = Sets.newHashSet(fqExpectedPaths);
     for (FileStatus fileStatus : fetchedStatuses) {
@@ -374,13 +368,10 @@ public class TestFileInputFormat {
 
 
   private void verifySplits(List<String> expected, List<InputSplit> splits) {
-    Iterable<String> pathsFromSplits = Iterables.transform(splits,
-        new Function<InputSplit, String>() {
-          @Override
-          public String apply(@Nullable InputSplit input) {
-            return ((FileSplit) input).getPath().toString();
-          }
-        });
+    Iterable<String> pathsFromSplits =
+        splits.stream().map(
+            input-> ((FileSplit) input).getPath().toString())
+            .collect(Collectors.toList());
 
     Set<String> expectedSet = Sets.newHashSet(expected);
     for (String splitPathString : pathsFromSplits) {
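
Where the lambda only forwards its argument, a method reference reads tighter; a hypothetical tightening of the first pipeline above (localFs is a final parameter, so the reference is legal):

    Iterable<Path> fqExpectedPaths = expectedPaths.stream()
        .map(localFs::makeQualified)
        .collect(Collectors.toList());

The second pipeline keeps its lambda because of the FileSplit cast.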
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
index bc6be80..995f000 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
@@ -35,8 +35,6 @@ import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProtoOrBuilder;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
 import com.google.protobuf.TextFormat;
 
 @Private
@@ -87,13 +85,8 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
     }
     if (applicationStates != null && !applicationStates.isEmpty()) {
       builder.clearApplicationStates();
-      builder.addAllApplicationStates(Iterables.transform(applicationStates,
-          new Function<YarnApplicationState, YarnApplicationStateProto>() {
-            @Override
-            public YarnApplicationStateProto apply(YarnApplicationState input) {
-              return ProtoUtils.convertToProtoFormat(input);
-            }
-          }));
+      applicationStates.forEach(input ->
+          builder.addApplicationStates(ProtoUtils.convertToProtoFormat(input)));
     }
     if (applicationTags != null && !applicationTags.isEmpty()) {
       builder.clearApplicationTags();
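
The forEach feeds the builder's single-element addApplicationStates(...) directly, avoiding the intermediate collection the old Iterables.transform call handed to addAllApplicationStates(...). A hypothetical bulk-add form, for comparison:

    // Collect first, then add in one call (needs java.util.stream.Collectors):
    builder.addAllApplicationStates(applicationStates.stream()
        .map(ProtoUtils::convertToProtoFormat)
        .collect(Collectors.toList()));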

