Posted to common-commits@hadoop.apache.org by ki...@apache.org on 2016/10/14 21:38:07 UTC

hadoop git commit: Revert "HDFS-10960. TestDataNodeHotSwapVolumes#testRemoveVolumeBeingWritten fails at disk error verification after volume remove. (Manoj Govindassamy via lei)"

Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 f981dd1bc -> 8d1fbf786


Revert "HDFS-10960. TestDataNodeHotSwapVolumes#testRemoveVolumeBeingWritten fails at disk error verification after volume remove. (Manoj Govindassamy via lei)"

This reverts commit f981dd1bca1006f34f55361ba0d72e5d0a621753.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8d1fbf78
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8d1fbf78
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8d1fbf78

Branch: refs/heads/branch-2.8
Commit: 8d1fbf786b8288710a2eb0fe7ad7c9d852017e94
Parents: f981dd1
Author: Kihwal Lee <ki...@apache.org>
Authored: Fri Oct 14 16:37:51 2016 -0500
Committer: Kihwal Lee <ki...@apache.org>
Committed: Fri Oct 14 16:37:51 2016 -0500

----------------------------------------------------------------------
 .../datanode/TestDataNodeHotSwapVolumes.java     | 19 +++++++------------
 1 file changed, 7 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8d1fbf78/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java
index 158efd6..c03b02b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java
@@ -646,6 +646,8 @@ public class TestDataNodeHotSwapVolumes {
     final DataNode dn = cluster.getDataNodes().get(dataNodeIdx);
     final FileSystem fs = cluster.getFileSystem();
     final Path testFile = new Path("/test");
+    final long lastTimeDiskErrorCheck = dn.getLastDiskErrorCheck();
+
     FSDataOutputStream out = fs.create(testFile, REPLICATION);
 
     Random rb = new Random(0);
@@ -701,24 +703,17 @@ public class TestDataNodeHotSwapVolumes {
 
     reconfigThread.join();
 
-    // Verify if the data directory reconfigure was successful
-    FsDatasetSpi<? extends FsVolumeSpi> fsDatasetSpi = dn.getFSDataset();
-    try (FsDatasetSpi.FsVolumeReferences fsVolumeReferences = fsDatasetSpi
-        .getFsVolumeReferences()) {
-      for (int i =0; i < fsVolumeReferences.size(); i++) {
-        System.out.println("Vol: " +
-            fsVolumeReferences.get(i).getBaseURI().toString());
-      }
-      assertEquals("Volume remove wasn't successful.",
-          1, fsVolumeReferences.size());
-    }
-
     // Verify the file has sufficient replications.
     DFSTestUtil.waitReplication(fs, testFile, REPLICATION);
     // Read the content back
     byte[] content = DFSTestUtil.readFileBuffer(fs, testFile);
     assertEquals(BLOCK_SIZE, content.length);
 
+    // If an IOException thrown from BlockReceiver#run, it triggers
+    // DataNode#checkDiskError(). So we can test whether checkDiskError() is called,
+    // to see whether there is IOException in BlockReceiver#run().
+    assertEquals(lastTimeDiskErrorCheck, dn.getLastDiskErrorCheck());
+
     if (!exceptions.isEmpty()) {
       throw new IOException(exceptions.get(0).getCause());
     }
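
For context, a minimal sketch of the timing-based check this revert restores to testRemoveVolumeBeingWritten. Only DataNode#getLastDiskErrorCheck() and the assertion come from the diff above; the class name DiskErrorCheckSketch and the VolumeRemovalScenario hook are hypothetical stand-ins for the real test's MiniDFSCluster setup, in-flight write, and hot-swap volume removal, and the sketch sits in the same package as the test so the DataNode accessor is reachable.

package org.apache.hadoop.hdfs.server.datanode;

import static org.junit.Assert.assertEquals;

/**
 * Sketch only: illustrates the disk-error-check assertion restored by this
 * revert. VolumeRemovalScenario is a hypothetical placeholder for the real
 * test's plumbing in TestDataNodeHotSwapVolumes#testRemoveVolumeBeingWritten.
 */
public class DiskErrorCheckSketch {

  /** Hypothetical stand-in for cluster setup, write, and volume removal. */
  interface VolumeRemovalScenario {
    DataNode dataNode();
    void removeVolumeWhileWriting() throws Exception;
  }

  void verifyNoDiskErrorCheck(VolumeRemovalScenario scenario) throws Exception {
    DataNode dn = scenario.dataNode();

    // Record the timestamp before the volume is removed, as the restored
    // test does before it starts writing.
    long lastTimeDiskErrorCheck = dn.getLastDiskErrorCheck();

    scenario.removeVolumeWhileWriting();

    // An IOException thrown from BlockReceiver#run triggers
    // DataNode#checkDiskError(), which advances this timestamp. An unchanged
    // value therefore means no disk error surfaced while the volume was
    // being removed mid-write.
    assertEquals(lastTimeDiskErrorCheck, dn.getLastDiskErrorCheck());
  }
}

The check is deliberately indirect: it infers "no IOException in BlockReceiver#run" from "checkDiskError() never ran". HDFS-10960 had replaced it on branch-2.8 with the direct volume-count assertion visible in the removed lines of the diff; this commit backs that change out.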

