Posted to common-commits@hadoop.apache.org by gi...@apache.org on 2019/03/28 18:16:53 UTC

[hadoop] branch trunk updated: HDFS-14395. Remove WARN Logging From Interrupts. Contributed by David Mollitor.

This is an automated email from the ASF dual-hosted git repository.

gifuma pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 49b02d4  HDFS-14395. Remove WARN Logging From Interrupts. Contributed by David Mollitor.
49b02d4 is described below

commit 49b02d4a9bf9beac19f716488348ea4e30563ff4
Author: Giovanni Matteo Fumarola <gi...@apache.org>
AuthorDate: Thu Mar 28 11:16:01 2019 -0700

    HDFS-14395. Remove WARN Logging From Interrupts. Contributed by David Mollitor.
---
 .../java/org/apache/hadoop/hdfs/DataStreamer.java  | 25 +++++++++++-----------
 1 file changed, 13 insertions(+), 12 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DataStreamer.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DataStreamer.java
index 4c733bf..0029a76 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DataStreamer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DataStreamer.java
@@ -680,7 +680,7 @@ class DataStreamer extends Daemon {
             try {
               dataQueue.wait(timeout);
             } catch (InterruptedException  e) {
-              LOG.warn("Caught exception", e);
+              LOG.debug("Thread interrupted", e);
             }
             doSleep = false;
             now = Time.monotonicNow();
@@ -695,7 +695,7 @@ class DataStreamer extends Daemon {
             try {
               backOffIfNecessary();
             } catch (InterruptedException e) {
-              LOG.warn("Caught exception", e);
+              LOG.debug("Thread interrupted", e);
             }
             one = dataQueue.getFirst(); // regular data packet
             SpanId[] parents = one.getTraceParents();
@@ -708,9 +708,8 @@ class DataStreamer extends Daemon {
         }
 
         // get new block from namenode.
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("stage=" + stage + ", " + this);
-        }
+        LOG.debug("stage={}, {}", stage, this);
+
         if (stage == BlockConstructionStage.PIPELINE_SETUP_CREATE) {
           LOG.debug("Allocating new block: {}", this);
           setPipeline(nextBlockOutputStream());
@@ -738,7 +737,7 @@ class DataStreamer extends Daemon {
                 // wait for acks to arrive from datanodes
                 dataQueue.wait(1000);
               } catch (InterruptedException  e) {
-                LOG.warn("Caught exception", e);
+                LOG.debug("Thread interrupted", e);
               }
             }
           }
@@ -893,6 +892,7 @@ class DataStreamer extends Daemon {
         }
         checkClosed();
       } catch (ClosedChannelException cce) {
+        LOG.debug("Closed channel exception", cce);
       }
       long duration = Time.monotonicNow() - begin;
       if (duration > dfsclientSlowLogThresholdMs) {
@@ -946,7 +946,8 @@ class DataStreamer extends Daemon {
         }
         checkClosed();
         queuePacket(packet);
-      } catch (ClosedChannelException ignored) {
+      } catch (ClosedChannelException cce) {
+        LOG.debug("Closed channel exception", cce);
       }
     }
   }
@@ -985,7 +986,8 @@ class DataStreamer extends Daemon {
         response.close();
         response.join();
       } catch (InterruptedException  e) {
-        LOG.warn("Caught exception", e);
+        LOG.debug("Thread interrupted", e);
+        Thread.currentThread().interrupt();
       } finally {
         response = null;
       }
@@ -1097,9 +1099,7 @@ class DataStreamer extends Daemon {
             }
           }
 
-          if (LOG.isDebugEnabled()) {
-            LOG.debug("DFSClient {}", ack);
-          }
+          LOG.debug("DFSClient {}", ack);
 
           long seqno = ack.getSeqno();
           // processes response status from datanodes.
@@ -1616,7 +1616,8 @@ class DataStreamer extends Daemon {
         // good reports should follow bad ones, if client committed
         // with those nodes.
         Thread.sleep(2000);
-      } catch (InterruptedException ignored) {
+      } catch (InterruptedException e) {
+        LOG.debug("Thread interrupted", e);
       }
     }
   }
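
For readers skimming the diff: the recurring edit is to treat an InterruptedException as an expected event rather than a warning-worthy failure, logging it at DEBUG and, where appropriate, restoring the thread's interrupt status. Below is a minimal, self-contained sketch of that pattern; the QueueWorker class, its queue field, and the log messages are illustrative only and are not part of DataStreamer.java.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Illustrative worker: an interrupt while waiting is expected during
 * shutdown, so it is logged quietly at DEBUG and the interrupt status
 * is restored for callers further up the stack.
 */
public class QueueWorker implements Runnable {
  private static final Logger LOG = LoggerFactory.getLogger(QueueWorker.class);

  private final BlockingQueue<String> queue = new LinkedBlockingQueue<>();

  @Override
  public void run() {
    while (!Thread.currentThread().isInterrupted()) {
      try {
        // Block for up to one second waiting for work.
        String item = queue.poll(1, TimeUnit.SECONDS);
        if (item != null) {
          LOG.debug("Processing item: {}", item);
        }
      } catch (InterruptedException e) {
        // Expected on shutdown: log at DEBUG and preserve the flag
        // so the while-condition above can observe it.
        LOG.debug("Thread interrupted", e);
        Thread.currentThread().interrupt();
      }
    }
  }
}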

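The diff also drops explicit isDebugEnabled() guards in favor of SLF4J parameterized messages, which defer argument formatting until DEBUG is actually enabled. A hypothetical before/after sketch follows; the GuardExample class and logStage method are made up for illustration.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class GuardExample {
  private static final Logger LOG = LoggerFactory.getLogger(GuardExample.class);

  void logStage(Object stage, Object context) {
    // Before: explicit guard plus string concatenation.
    if (LOG.isDebugEnabled()) {
      LOG.debug("stage=" + stage + ", " + context);
    }

    // After: parameterized message; the arguments are only formatted
    // when DEBUG is enabled, so the guard is unnecessary.
    LOG.debug("stage={}, {}", stage, context);
  }
}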

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org