You are viewing a plain-text version of this content; the canonical (HTML) version is available at the original archive posting.
Posted to hdfs-commits@hadoop.apache.org by at...@apache.org on 2013/01/11 04:44:36 UTC
svn commit: r1431867 - in
/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: ./
src/main/java/org/apache/hadoop/hdfs/server/datanode/
src/main/java/org/apache/hadoop/hdfs/util/
src/test/java/org/apache/hadoop/hdfs/
Author: atm
Date: Fri Jan 11 03:44:36 2013
New Revision: 1431867
URL: http://svn.apache.org/viewvc?rev=1431867&view=rev
Log:
HDFS-4328. TestLargeBlock#testLargeBlockSize is timing out. Contributed by Chris Nauroth.
Modified:
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/DataTransferThrottler.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLargeBlock.java
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1431867&r1=1431866&r2=1431867&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Fri Jan 11 03:44:36 2013
@@ -299,6 +299,9 @@ Trunk (Unreleased)
HDFS-4382. Fix typo MAX_NOT_CHANGED_INTERATIONS. (Ted Yu via suresh)
+ HDFS-4328. TestLargeBlock#testLargeBlockSize is timing out. (Chris Nauroth
+ via atm)
+
Release 2.0.3-alpha - Unreleased
INCOMPATIBLE CHANGES
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java?rev=1431867&r1=1431866&r2=1431867&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java Fri Jan 11 03:44:36 2013
@@ -648,7 +648,7 @@ class BlockSender implements java.io.Clo
ByteBuffer pktBuf = ByteBuffer.allocate(pktBufSize);
- while (endOffset > offset) {
+ while (endOffset > offset && !Thread.currentThread().isInterrupted()) {
manageOsCache();
long len = sendPacket(pktBuf, maxChunksPerPacket, streamForSendChunks,
transferTo, throttler);
@@ -656,16 +656,19 @@ class BlockSender implements java.io.Clo
totalRead += len + (numberOfChunks(len) * checksumSize);
seqno++;
}
- try {
- // send an empty packet to mark the end of the block
- sendPacket(pktBuf, maxChunksPerPacket, streamForSendChunks, transferTo,
- throttler);
- out.flush();
- } catch (IOException e) { //socket error
- throw ioeToSocketException(e);
- }
+ // If this thread was interrupted, then it did not send the full block.
+ if (!Thread.currentThread().isInterrupted()) {
+ try {
+ // send an empty packet to mark the end of the block
+ sendPacket(pktBuf, maxChunksPerPacket, streamForSendChunks, transferTo,
+ throttler);
+ out.flush();
+ } catch (IOException e) { //socket error
+ throw ioeToSocketException(e);
+ }
- sentEntireByteRange = true;
+ sentEntireByteRange = true;
+ }
} finally {
if (clientTraceFmt != null) {
final long endTime = System.nanoTime();
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/DataTransferThrottler.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/DataTransferThrottler.java?rev=1431867&r1=1431866&r2=1431867&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/DataTransferThrottler.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/DataTransferThrottler.java Fri Jan 11 03:44:36 2013
@@ -96,7 +96,12 @@ public class DataTransferThrottler {
// Wait for next period so that curReserve can be increased.
try {
wait( curPeriodEnd - now );
- } catch (InterruptedException ignored) {}
+ } catch (InterruptedException e) {
+ // Abort throttle and reset interrupted status to make sure other
+ // interrupt handling higher in the call stack executes.
+ Thread.currentThread().interrupt();
+ break;
+ }
} else if ( now < (curPeriodStart + periodExtension)) {
curPeriodStart = curPeriodEnd;
curReserve += bytesPerPeriod;
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLargeBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLargeBlock.java?rev=1431867&r1=1431866&r2=1431867&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLargeBlock.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLargeBlock.java Fri Jan 11 03:44:36 2013
@@ -158,7 +158,7 @@ public class TestLargeBlock {
* Test for block size of 2GB + 512B
* @throws IOException in case of errors
*/
- @Test
+ @Test(timeout = 120000)
public void testLargeBlockSize() throws IOException {
final long blockSize = 2L * 1024L * 1024L * 1024L + 512L; // 2GB + 512B
runTest(blockSize);