You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by sz...@apache.org on 2009/10/20 02:16:25 UTC
svn commit: r826906 - in /hadoop/hdfs/branches/branch-0.21: ./
src/java/org/apache/hadoop/hdfs/server/datanode/
src/java/org/apache/hadoop/hdfs/server/namenode/
src/test/aop/org/apache/hadoop/fi/
src/test/aop/org/apache/hadoop/hdfs/server/datanode/
Author: szetszwo
Date: Tue Oct 20 00:16:25 2009
New Revision: 826906
URL: http://svn.apache.org/viewvc?rev=826906&view=rev
Log:
HDFS-716. Define a pointcut for pipeline close and add a few fault injection tests.
Modified:
hadoop/hdfs/branches/branch-0.21/CHANGES.txt
hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlockInfoUnderConstruction.java
hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java
Modified: hadoop/hdfs/branches/branch-0.21/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/CHANGES.txt?rev=826906&r1=826905&r2=826906&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/CHANGES.txt (original)
+++ hadoop/hdfs/branches/branch-0.21/CHANGES.txt Tue Oct 20 00:16:25 2009
@@ -264,13 +264,16 @@
HDFS-705. Create an adapter to access some of package-private methods of
DataNode from tests (cos)
- HDFS-710. Added actions with constraints to the pipeline fault injection
- tests and changed SleepAction to support uniform random sleeping over an
+ HDFS-710. Add actions with constraints to the pipeline fault injection
+ tests and change SleepAction to support uniform random sleeping over an
interval. (szetszwo)
HDFS-713. Need to properly check the type of the test class from an aspect
(cos)
+ HDFS-716. Define a pointcut for pipeline close and add a few fault
+ injection tests. (szetszwo)
+
BUG FIXES
HDFS-76. Better error message to users when commands fail because of
Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java?rev=826906&r1=826905&r2=826906&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java Tue Oct 20 00:16:25 2009
@@ -431,6 +431,15 @@
int endOfHeader = buf.position();
buf.reset();
+ return receivePacket(offsetInBlock, seqno, lastPacketInBlock, len, endOfHeader);
+ }
+
+ /**
+ * Receives and processes a packet. It can contain many chunks.
+ * Returns the number of data bytes that the packet has.
+ */
+ private int receivePacket(long offsetInBlock, long seqno,
+ boolean lastPacketInBlock, int len, int endOfHeader) throws IOException {
if (LOG.isDebugEnabled()){
LOG.debug("Receiving one packet for block " + block +
" of length " + len +
Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlockInfoUnderConstruction.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlockInfoUnderConstruction.java?rev=826906&r1=826905&r2=826906&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlockInfoUnderConstruction.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlockInfoUnderConstruction.java Tue Oct 20 00:16:25 2009
@@ -110,6 +110,18 @@
// Sufficient to rely on super's implementation
return (this == obj) || super.equals(obj);
}
+
+ /** {@inheritDoc} */
+ @Override
+ public String toString() {
+ final StringBuilder b = new StringBuilder(getClass().getSimpleName());
+ b.append("[")
+ .append(expectedLocation)
+ .append("|")
+ .append(state)
+ .append("]");
+ return b.toString();
+ }
}
/**
@@ -255,11 +267,10 @@
/** {@inheritDoc} */
@Override
public String toString() {
- final StringBuilder b = new StringBuilder(getClass().getSimpleName());
- b.append("{")
- .append("\n blockUCState=").append(blockUCState)
- .append("\n replicas=").append(replicas)
- .append("\n primaryNodeIndex=").append(primaryNodeIndex)
+ final StringBuilder b = new StringBuilder(super.toString());
+ b.append("{blockUCState=").append(blockUCState)
+ .append(", primaryNodeIndex=").append(primaryNodeIndex)
+ .append(", replicas=").append(replicas)
.append("}");
return b.toString();
}
Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java?rev=826906&r1=826905&r2=826906&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java Tue Oct 20 00:16:25 2009
@@ -65,6 +65,10 @@
/** Simulate action for the statusRead pointcut */
public final ActionContainer<DatanodeID> fiStatusRead
= new ActionContainer<DatanodeID>();
+ /** Simulate action for the pipelineClose pointcut */
+ public final ActionContainer<DatanodeID> fiPipelineClose
+ = new ActionContainer<DatanodeID>();
+
/** Verification action for the pipelineInitNonAppend pointcut */
public final ActionContainer<Integer> fiPipelineInitErrorNonAppend
= new ActionContainer<Integer>();
Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj?rev=826906&r1=826905&r2=826906&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj Tue Oct 20 00:16:25 2009
@@ -22,9 +22,9 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fi.DataTransferTestUtil.DataTransferTest;
import org.apache.hadoop.fi.DataTransferTestUtil;
import org.apache.hadoop.fi.ProbabilityModel;
+import org.apache.hadoop.fi.DataTransferTestUtil.DataTransferTest;
import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
/**
@@ -57,4 +57,29 @@
thisJoinPoint.getStaticPart( ).getSourceLocation());
}
}
+
+ pointcut pipelineClose(BlockReceiver blockreceiver, long offsetInBlock, long seqno,
+ boolean lastPacketInBlock, int len, int endOfHeader) :
+ call (* BlockReceiver.receivePacket(long, long, boolean, int, int))
+ && this(blockreceiver)
+ && args(offsetInBlock, seqno, lastPacketInBlock, len, endOfHeader);
+
+ before(BlockReceiver blockreceiver, long offsetInBlock, long seqno,
+ boolean lastPacketInBlock, int len, int endOfHeader
+ ) throws IOException : pipelineClose(blockreceiver, offsetInBlock, seqno,
+ lastPacketInBlock, len, endOfHeader) {
+ if (len == 0) {
+ LOG.info("FI: pipelineClose, offsetInBlock=" + offsetInBlock
+ + ", seqno=" + seqno
+ + ", lastPacketInBlock=" + lastPacketInBlock
+ + ", len=" + len
+ + ", endOfHeader=" + endOfHeader);
+
+ final DataTransferTest test = DataTransferTestUtil.getDataTransferTest();
+ if (test != null) {
+ test.fiPipelineClose.run(
+ blockreceiver.getDataNode().getDatanodeRegistration());
+ }
+ }
+ }
}
Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java?rev=826906&r1=826905&r2=826906&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java Tue Oct 20 00:16:25 2009
@@ -300,4 +300,49 @@
final String methodName = FiTestUtil.getMethodName();
runCallReceivePacketTest(methodName, 2, new DoosAction(methodName, 2));
}
-}
+
+ private static void runPipelineCloseTest(String methodName,
+ Action<DatanodeID> a) throws IOException {
+ FiTestUtil.LOG.info("Running " + methodName + " ...");
+ final DataTransferTest t = (DataTransferTest) DataTransferTestUtil
+ .initTest();
+ t.fiPipelineClose.set(a);
+ write1byte(methodName);
+ }
+
+ /**
+ * Pipeline close:
+ * DN0 throws an OutOfMemoryException
+ * right after it receives a close request from the client.
+ * The client gets an IOException and determines that DN0 is bad.
+ */
+ @Test
+ public void pipeline_Fi_44() throws IOException {
+ final String methodName = FiTestUtil.getMethodName();
+ runPipelineCloseTest(methodName, new OomAction(methodName, 0));
+ }
+
+ /**
+ * Pipeline close:
+ * DN1 throws an OutOfMemoryException
+ * right after it receives a close request from the client.
+ * The client gets an IOException and determines that DN1 is bad.
+ */
+ @Test
+ public void pipeline_Fi_45() throws IOException {
+ final String methodName = FiTestUtil.getMethodName();
+ runPipelineCloseTest(methodName, new OomAction(methodName, 1));
+ }
+
+ /**
+ * Pipeline close:
+ * DN2 throws an OutOfMemoryException
+ * right after it receives a close request from the client.
+ * The client gets an IOException and determines that DN2 is bad.
+ */
+ @Test
+ public void pipeline_Fi_46() throws IOException {
+ final String methodName = FiTestUtil.getMethodName();
+ runPipelineCloseTest(methodName, new OomAction(methodName, 2));
+ }
+}
\ No newline at end of file