You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by aa...@apache.org on 2020/12/11 04:58:34 UTC
[hadoop] branch branch-3.3 updated: HADOOP-17138. Fix spotbugs
warnings surfaced after upgrade to 4.0.6. (#2155) (#2538)
This is an automated email from the ASF dual-hosted git repository.
aajisaka pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/branch-3.3 by this push:
new 71bda1a HADOOP-17138. Fix spotbugs warnings surfaced after upgrade to 4.0.6. (#2155) (#2538)
71bda1a is described below
commit 71bda1a2e8d60ec34645acd87d032c913bc8d83a
Author: Akira Ajisaka <aa...@apache.org>
AuthorDate: Fri Dec 11 13:58:02 2020 +0900
HADOOP-17138. Fix spotbugs warnings surfaced after upgrade to 4.0.6. (#2155) (#2538)
(cherry picked from commit 1b29c9bfeee0035dd042357038b963843169d44c)
Co-authored-by: Masatake Iwasaki <iw...@apache.org>
---
.../hadoop-cos/dev-support/findbugs-exclude.xml | 4 +--
.../main/java/org/apache/hadoop/ipc/Server.java | 4 +--
.../datanode/checker/DatasetVolumeChecker.java | 36 +++++++++++++---------
.../datanode/checker/ThrottledAsyncChecker.java | 2 +-
.../hdfs/server/namenode/FSEditLogLoader.java | 2 +-
.../dev-support/findbugs-exclude.xml | 14 ++++++++-
.../java/org/apache/hadoop/yarn/sls/SLSRunner.java | 2 +-
.../hadoop-yarn/dev-support/findbugs-exclude.xml | 6 ++++
.../storage/TestTimelineReaderHBaseDown.java | 7 ++---
9 files changed, 50 insertions(+), 27 deletions(-)
diff --git a/hadoop-cloud-storage-project/hadoop-cos/dev-support/findbugs-exclude.xml b/hadoop-cloud-storage-project/hadoop-cos/dev-support/findbugs-exclude.xml
index e647e67..f8c3472 100644
--- a/hadoop-cloud-storage-project/hadoop-cos/dev-support/findbugs-exclude.xml
+++ b/hadoop-cloud-storage-project/hadoop-cos/dev-support/findbugs-exclude.xml
@@ -16,8 +16,8 @@
-->
<FindBugsFilter>
<Match>
- <Class name="org.apache.hadoop.fs.cosn.CosNInputStream.ReadBuffer"/>
+ <Class name="org.apache.hadoop.fs.cosn.CosNInputStream$ReadBuffer"/>
<Method name="getBuffer"/>
- <Bug pattern="EI_EXPOSE_REP"/>
+ <Bug pattern="EI_EXPOSE_REP"/>
</Match>
</FindBugsFilter>
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index d6a19a0..68d4923 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -3728,7 +3728,7 @@ public abstract class Server {
if (count == null) {
count = 1;
} else {
- count++;
+ count = count + 1;
}
userToConnectionsMap.put(user, count);
}
@@ -3740,7 +3740,7 @@ public abstract class Server {
if (count == null) {
return;
} else {
- count--;
+ count = count - 1;
}
if (count == 0) {
userToConnectionsMap.remove(user);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/DatasetVolumeChecker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/DatasetVolumeChecker.java
index 7bea216..d077d21 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/DatasetVolumeChecker.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/DatasetVolumeChecker.java
@@ -354,23 +354,29 @@ public class DatasetVolumeChecker {
}
@Override
- public void onSuccess(@Nonnull VolumeCheckResult result) {
- switch(result) {
- case HEALTHY:
- case DEGRADED:
- LOG.debug("Volume {} is {}.", reference.getVolume(), result);
- markHealthy();
- break;
- case FAILED:
- LOG.warn("Volume {} detected as being unhealthy",
+ public void onSuccess(VolumeCheckResult result) {
+ if (result == null) {
+ LOG.error("Unexpected health check result null for volume {}",
reference.getVolume());
- markFailed();
- break;
- default:
- LOG.error("Unexpected health check result {} for volume {}",
- result, reference.getVolume());
markHealthy();
- break;
+ } else {
+ switch(result) {
+ case HEALTHY:
+ case DEGRADED:
+ LOG.debug("Volume {} is {}.", reference.getVolume(), result);
+ markHealthy();
+ break;
+ case FAILED:
+ LOG.warn("Volume {} detected as being unhealthy",
+ reference.getVolume());
+ markFailed();
+ break;
+ default:
+ LOG.error("Unexpected health check result {} for volume {}",
+ result, reference.getVolume());
+ markHealthy();
+ break;
+ }
}
cleanup();
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/ThrottledAsyncChecker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/ThrottledAsyncChecker.java
index 1ded5cb..4ad32ae 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/ThrottledAsyncChecker.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/checker/ThrottledAsyncChecker.java
@@ -166,7 +166,7 @@ public class ThrottledAsyncChecker<K, V> implements AsyncChecker<K, V> {
Checkable<K, V> target, ListenableFuture<V> lf) {
Futures.addCallback(lf, new FutureCallback<V>() {
@Override
- public void onSuccess(@Nullable V result) {
+ public void onSuccess(V result) {
synchronized (ThrottledAsyncChecker.this) {
checksInProgress.remove(target);
completedChecks.put(target, new LastCheckResult<>(
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
index 55af300..2ac0eb1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
@@ -1238,7 +1238,7 @@ public class FSEditLogLoader {
holder = new Holder<Integer>(1);
opCounts.put(opCode, holder);
} else {
- holder.held++;
+ holder.held = holder.held + 1;
}
counter.increment();
}
diff --git a/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml b/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml
index 9b4d8c9..4e459b6 100644
--- a/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml
+++ b/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml
@@ -533,5 +533,17 @@
<Class name="org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage$1" />
<Bug pattern="SE_BAD_FIELD_INNER_CLASS" />
</Match>
-
+
+ <!--
+ HADOOP-17138: Suppress warnings about unchecked Nullable
+ since the method catches NullPointerException then registerError.
+ -->
+ <Match>
+ <Or>
+ <Class name="org.apache.hadoop.mapred.LocatedFileStatusFetcher$ProcessInputDirCallback" />
+ <Class name="org.apache.hadoop.mapred.LocatedFileStatusFetcher$ProcessInitialInputPathCallback" />
+ </Or>
+ <Method name="onSuccess" />
+ <Bug pattern="NP_PARAMETER_MUST_BE_NONNULL_BUT_MARKED_AS_NULLABLE" />
+ </Match>
</FindBugsFilter>
diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
index 6f75bd1..5bfa8dc 100644
--- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
+++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
@@ -813,7 +813,7 @@ public class SLSRunner extends Configured implements Tool {
if (appNum == null) {
appNum = 1;
} else {
- appNum++;
+ appNum = appNum + 1;
}
queueAppNumMap.put(queueName, appNum);
diff --git a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
index 3a37293..95706f9 100644
--- a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
+++ b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
@@ -705,4 +705,10 @@
<Method name="getDevices" />
<Bug pattern="DMI_HARDCODED_ABSOLUTE_FILENAME" />
</Match>
+
+ <!-- Suppress warning about anonymous class for mocking. -->
+ <Match>
+ <Class name="~org\.apache\.hadoop\.yarn\.server\.timelineservice\.reader\.TestTimelineReaderWebServicesHBaseStorage.*" />
+ <Bug pattern="UMAC_UNCALLABLE_METHOD_OF_ANONYMOUS_CLASS" />
+ </Match>
</FindBugsFilter>
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestTimelineReaderHBaseDown.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestTimelineReaderHBaseDown.java
index 1148b80..d83f130 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestTimelineReaderHBaseDown.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestTimelineReaderHBaseDown.java
@@ -181,14 +181,13 @@ public class TestTimelineReaderHBaseDown {
}
}
- private static void checkQuery(HBaseTimelineReaderImpl htr) throws
- IOException {
+ private static Set<TimelineEntity> checkQuery(HBaseTimelineReaderImpl htr)
+ throws IOException {
TimelineReaderContext context =
new TimelineReaderContext(YarnConfiguration.DEFAULT_RM_CLUSTER_ID,
null, null, null, null, TimelineEntityType
.YARN_FLOW_ACTIVITY.toString(), null, null);
- Set<TimelineEntity> entities = htr.getEntities(context, MONITOR_FILTERS,
- DATA_TO_RETRIEVE);
+ return htr.getEntities(context, MONITOR_FILTERS, DATA_TO_RETRIEVE);
}
private static void configure(HBaseTestingUtility util) {
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org