You are viewing a plain text version of this content. The canonical link for it was provided in the original archived message (the hyperlink was lost in this plain-text rendering).
Posted to common-commits@hadoop.apache.org by he...@apache.org on 2020/09/11 07:44:24 UTC
[hadoop] branch trunk updated: HDFS-15551. Tiny Improve for
DeadNode detector (#2265)
This is an automated email from the ASF dual-hosted git repository.
hexiaoqiao pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/trunk by this push:
new 89428f1 HDFS-15551. Tiny Improve for DeadNode detector (#2265)
89428f1 is described below
commit 89428f142fe7cee17bd1a0f5f207b6952ec79d32
Author: imbajin <im...@users.noreply.github.com>
AuthorDate: Fri Sep 11 15:44:03 2020 +0800
HDFS-15551. Tiny Improve for DeadNode detector (#2265)
Contributed by imbajin.
Reviewed-by: leosunli <li...@gmail.com>
Signed-off-by: He Xiaoqiao <he...@apache.org>
---
.../src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java | 3 +++
.../main/java/org/apache/hadoop/hdfs/DeadNodeDetector.java | 11 ++++++-----
2 files changed, 9 insertions(+), 5 deletions(-)
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
index 402c382..e5efbb8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
@@ -181,10 +181,13 @@ public class DFSInputStream extends FSInputStream
private byte[] oneByteBuf; // used for 'int read()'
protected void addToLocalDeadNodes(DatanodeInfo dnInfo) {
+ DFSClient.LOG.debug("Add {} to local dead nodes, previously was {}.",
+ dnInfo, deadNodes);
deadNodes.put(dnInfo, dnInfo);
}
protected void removeFromLocalDeadNodes(DatanodeInfo dnInfo) {
+ DFSClient.LOG.debug("Remove {} from local dead nodes.", dnInfo);
deadNodes.remove(dnInfo);
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DeadNodeDetector.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DeadNodeDetector.java
index a573e8a..aaa12db 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DeadNodeDetector.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DeadNodeDetector.java
@@ -294,7 +294,7 @@ public class DeadNodeDetector implements Runnable {
}
/**
- * Prode datanode by probe byte.
+ * Prode datanode by probe type.
*/
private void scheduleProbe(ProbeType type) {
LOG.debug("Schedule probe datanode for probe type: {}.", type);
@@ -376,9 +376,8 @@ public class DeadNodeDetector implements Runnable {
} catch (Exception e) {
LOG.error("Probe failed, datanode: {}, type: {}.", datanodeInfo, type,
e);
+ deadNodeDetector.probeCallBack(this, false);
}
-
- deadNodeDetector.probeCallBack(this, false);
}
}
@@ -402,7 +401,7 @@ public class DeadNodeDetector implements Runnable {
}
} else {
if (probe.getType() == ProbeType.CHECK_SUSPECT) {
- LOG.info("Add the node to dead node list: {}.",
+ LOG.warn("Probe failed, add suspect node to dead node list: {}.",
probe.getDatanodeInfo());
addToDead(probe.getDatanodeInfo());
}
@@ -415,11 +414,12 @@ public class DeadNodeDetector implements Runnable {
private void checkDeadNodes() {
Set<DatanodeInfo> datanodeInfos = clearAndGetDetectedDeadNodes();
for (DatanodeInfo datanodeInfo : datanodeInfos) {
- LOG.debug("Add dead node to check: {}.", datanodeInfo);
if (!deadNodesProbeQueue.offer(datanodeInfo)) {
LOG.debug("Skip to add dead node {} to check " +
"since the probe queue is full.", datanodeInfo);
break;
+ } else {
+ LOG.debug("Add dead node to check: {}.", datanodeInfo);
}
}
state = State.IDLE;
@@ -475,6 +475,7 @@ public class DeadNodeDetector implements Runnable {
datanodeInfos.add(datanodeInfo);
}
+ LOG.debug("Add datanode {} to suspectAndDeadNodes.", datanodeInfo);
addSuspectNodeToDetect(datanodeInfo);
}
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org