You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by ha...@apache.org on 2010/06/30 16:30:24 UTC
svn commit: r959330 - in /hadoop/hdfs/branches/branch-0.21: ./
src/c++/libhdfs/ src/contrib/hdfsproxy/ src/java/
src/java/org/apache/hadoop/hdfs/
src/java/org/apache/hadoop/hdfs/server/datanode/ src/test/hdfs/
src/test/hdfs/org/apache/hadoop/hdfs/ src/...
Author: hairong
Date: Wed Jun 30 14:30:23 2010
New Revision: 959330
URL: http://svn.apache.org/viewvc?rev=959330&view=rev
Log:
Merge -r959323:959324 to move the change made by HDFS-1057 from main to 0.21.
Added:
hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ChunkChecksum.java
- copied unchanged from r959324, hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ChunkChecksum.java
hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFileConcurrentReader.java
- copied unchanged from r959324, hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestFileConcurrentReader.java
Modified:
hadoop/hdfs/branches/branch-0.21/ (props changed)
hadoop/hdfs/branches/branch-0.21/CHANGES.txt
hadoop/hdfs/branches/branch-0.21/build.xml (props changed)
hadoop/hdfs/branches/branch-0.21/src/c++/libhdfs/ (props changed)
hadoop/hdfs/branches/branch-0.21/src/contrib/hdfsproxy/ (props changed)
hadoop/hdfs/branches/branch-0.21/src/java/ (props changed)
hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/DFSInputStream.java
hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java
hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInPipeline.java
hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInPipelineInterface.java
hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java (props changed)
hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaNotFoundException.java
hadoop/hdfs/branches/branch-0.21/src/test/hdfs/ (props changed)
hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/DFSTestUtil.java
hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
hadoop/hdfs/branches/branch-0.21/src/webapps/datanode/ (props changed)
hadoop/hdfs/branches/branch-0.21/src/webapps/hdfs/ (props changed)
hadoop/hdfs/branches/branch-0.21/src/webapps/secondary/ (props changed)
Propchange: hadoop/hdfs/branches/branch-0.21/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun 30 14:30:23 2010
@@ -1,4 +1,4 @@
/hadoop/core/branches/branch-0.19/hdfs:713112
/hadoop/hdfs/branches/HDFS-265:796829-820463
/hadoop/hdfs/branches/branch-0.21:820487
-/hadoop/hdfs/trunk:947194,950323,952861
+/hadoop/hdfs/trunk:947194,950323,952861,959324
Modified: hadoop/hdfs/branches/branch-0.21/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/CHANGES.txt?rev=959330&r1=959329&r2=959330&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/CHANGES.txt (original)
+++ hadoop/hdfs/branches/branch-0.21/CHANGES.txt Wed Jun 30 14:30:23 2010
@@ -894,6 +894,9 @@ Release 0.21.0 - Unreleased
HDFS-1256. libhdfs is missing from the tarball. (tomwhite)
+ HDFS-1057. Concurrent readers hit ChecksumExceptions if following a
+ writer to very end of file. (sam rash via hairong)
+
Release 0.20.3 - Unreleased
IMPROVEMENTS
Propchange: hadoop/hdfs/branches/branch-0.21/build.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun 30 14:30:23 2010
@@ -2,4 +2,4 @@
/hadoop/core/trunk/build.xml:779102
/hadoop/hdfs/branches/HDFS-265/build.xml:796829-820463
/hadoop/hdfs/branches/branch-0.21/build.xml:820487
-/hadoop/hdfs/trunk/build.xml:947194,950323,952861
+/hadoop/hdfs/trunk/build.xml:947194,950323,952861,959324
Propchange: hadoop/hdfs/branches/branch-0.21/src/c++/libhdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun 30 14:30:23 2010
@@ -1,3 +1,3 @@
/hadoop/core/branches/branch-0.19/mapred/src/c++/libhdfs:713112
/hadoop/core/trunk/src/c++/libhdfs:776175-784663
-/hadoop/hdfs/trunk/src/c++/libhdfs:947194,950323,952861
+/hadoop/hdfs/trunk/src/c++/libhdfs:947194,950323,952861,959324
Propchange: hadoop/hdfs/branches/branch-0.21/src/contrib/hdfsproxy/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun 30 14:30:23 2010
@@ -2,4 +2,4 @@
/hadoop/core/trunk/src/contrib/hdfsproxy:776175-784663
/hadoop/hdfs/branches/HDFS-265/src/contrib/hdfsproxy:796829-820463
/hadoop/hdfs/branches/branch-0.21/src/contrib/hdfsproxy:820487
-/hadoop/hdfs/trunk/src/contrib/hdfsproxy:947194,950323,952861
+/hadoop/hdfs/trunk/src/contrib/hdfsproxy:947194,950323,952861,959324
Propchange: hadoop/hdfs/branches/branch-0.21/src/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun 30 14:30:23 2010
@@ -2,4 +2,4 @@
/hadoop/core/trunk/src/hdfs:776175-785643,785929-786278
/hadoop/hdfs/branches/HDFS-265/src/java:796829-820463
/hadoop/hdfs/branches/branch-0.21/src/java:820487
-/hadoop/hdfs/trunk/src/java:947194,950323,952861
+/hadoop/hdfs/trunk/src/java:947194,950323,952861,959324
Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/DFSInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/DFSInputStream.java?rev=959330&r1=959329&r2=959330&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/DFSInputStream.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/DFSInputStream.java Wed Jun 30 14:30:23 2010
@@ -36,7 +36,9 @@ import org.apache.hadoop.hdfs.protocol.L
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.security.BlockAccessToken;
import org.apache.hadoop.hdfs.security.InvalidAccessTokenException;
+import org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException;
import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.util.StringUtils;
@@ -138,6 +140,8 @@ class DFSInputStream extends FSInputStre
if (locatedblock == null || locatedblock.getLocations().length == 0) {
return 0;
}
+ int replicaNotFoundCount = locatedblock.getLocations().length;
+
for(DatanodeInfo datanode : locatedblock.getLocations()) {
try {
final ClientDatanodeProtocol cdp = DFSClient.createClientDatanodeProtocolProxy(
@@ -148,12 +152,28 @@ class DFSInputStream extends FSInputStre
}
}
catch(IOException ioe) {
+ if (ioe instanceof RemoteException &&
+ (((RemoteException) ioe).unwrapRemoteException() instanceof
+ ReplicaNotFoundException)) {
+ // special case : replica might not be on the DN, treat as 0 length
+ replicaNotFoundCount--;
+ }
+
if (DFSClient.LOG.isDebugEnabled()) {
- DFSClient.LOG.debug("Faild to getReplicaVisibleLength from datanode "
+ DFSClient.LOG.debug("Failed to getReplicaVisibleLength from datanode "
+ datanode + " for block " + locatedblock.getBlock(), ioe);
}
}
}
+
+ // Namenode told us about these locations, but none know about the replica
+ // means that we hit the race between pipeline creation start and end.
+ // we require all 3 because some other exception could have happened
+ // on a DN that has it. we want to report that error
+ if (replicaNotFoundCount == 0) {
+ return 0;
+ }
+
throw new IOException("Cannot obtain block length for " + locatedblock);
}
Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java?rev=959330&r1=959329&r2=959330&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java Wed Jun 30 14:30:23 2010
@@ -28,6 +28,7 @@ import java.io.EOFException;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
+import java.util.Arrays;
import java.util.LinkedList;
import java.util.zip.Checksum;
@@ -509,6 +510,8 @@ class BlockReceiver implements java.io.C
verifyChunks(pktBuf, dataOff, len, pktBuf, checksumOff);
}
+ byte[] lastChunkChecksum;
+
try {
long onDiskLen = replicaInfo.getBytesOnDisk();
if (onDiskLen<offsetInBlock) {
@@ -546,16 +549,28 @@ class BlockReceiver implements java.io.C
}
partialCrc.update(pktBuf, startByteToDisk, numBytesToDisk);
byte[] buf = FSOutputSummer.convertToByteStream(partialCrc, checksumSize);
+ lastChunkChecksum = Arrays.copyOfRange(
+ buf, buf.length - checksumSize, buf.length
+ );
checksumOut.write(buf);
LOG.debug("Writing out partial crc for data len " + len);
partialCrc = null;
} else {
+ lastChunkChecksum = Arrays.copyOfRange(
+ pktBuf,
+ checksumOff + checksumLen - checksumSize,
+ checksumOff + checksumLen
+ );
checksumOut.write(pktBuf, checksumOff, checksumLen);
}
- replicaInfo.setBytesOnDisk(offsetInBlock);
- datanode.myMetrics.bytesWritten.inc(len);
/// flush entire packet
flush();
+
+ replicaInfo.setLastChecksumAndDataLen(
+ offsetInBlock, lastChunkChecksum
+ );
+
+ datanode.myMetrics.bytesWritten.inc(len);
}
} catch (IOException iex) {
datanode.checkDiskError(iex);
Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java?rev=959330&r1=959329&r2=959330&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java Wed Jun 30 14:30:23 2010
@@ -76,6 +76,7 @@ class BlockSender implements java.io.Clo
* not sure if there will be much more improvement.
*/
private static final int MIN_BUFFER_WITH_TRANSFERTO = 64*1024;
+ private volatile ChunkChecksum lastChunkChecksum = null;
BlockSender(Block block, long startOffset, long length,
@@ -98,6 +99,32 @@ class BlockSender implements java.io.Clo
}
this.replicaVisibleLength = replica.getVisibleLength();
}
+ long minEndOffset = startOffset + length;
+ // if this is a write in progress
+ ChunkChecksum chunkChecksum = null;
+ if (replica instanceof ReplicaBeingWritten) {
+ for (int i = 0; i < 30 && replica.getBytesOnDisk() < minEndOffset; i++) {
+ try {
+ Thread.sleep(100);
+ } catch (InterruptedException ie) {
+ throw new IOException(ie);
+ }
+ }
+
+ long currentBytesOnDisk = replica.getBytesOnDisk();
+
+ if (currentBytesOnDisk < minEndOffset) {
+ throw new IOException(String.format(
+ "need %d bytes, but only %d bytes available",
+ minEndOffset,
+ currentBytesOnDisk
+ ));
+ }
+
+ ReplicaInPipeline rip = (ReplicaInPipeline) replica;
+ chunkChecksum = rip.getLastChecksumAndDataLen();
+ }
+
if (replica.getGenerationStamp() < block.getGenerationStamp()) {
throw new IOException(
"replica.getGenerationStamp() < block.getGenerationStamp(), block="
@@ -154,7 +181,14 @@ class BlockSender implements java.io.Clo
length = replicaVisibleLength;
}
- endOffset = replicaVisibleLength;
+ // end is either last byte on disk or the length for which we have a
+ // checksum
+ if (chunkChecksum != null) {
+ endOffset = chunkChecksum.getDataLength();
+ } else {
+ endOffset = replica.getBytesOnDisk();
+ }
+
if (startOffset < 0 || startOffset > endOffset
|| (length + startOffset) > endOffset) {
String msg = " Offset " + startOffset + " and length " + length
@@ -172,7 +206,12 @@ class BlockSender implements java.io.Clo
tmpLen += (bytesPerChecksum - tmpLen % bytesPerChecksum);
}
if (tmpLen < endOffset) {
+ // will use on-disk checksum here since the end is a stable chunk
endOffset = tmpLen;
+ } else if (chunkChecksum != null) {
+ //in last chunk which is changing. flag that we need to use in-memory
+ // checksum
+ this.lastChunkChecksum = chunkChecksum;
}
}
@@ -187,14 +226,6 @@ class BlockSender implements java.io.Clo
}
seqno = 0;
- //sleep a few times if getBytesOnDisk() < visible length
- for(int i = 0; i < 30 && replica.getBytesOnDisk() < replicaVisibleLength; i++) {
- try {
- Thread.sleep(100);
- } catch (InterruptedException ie) {
- throw new IOException(ie);
- }
- }
if (DataNode.LOG.isDebugEnabled()) {
DataNode.LOG.debug("replica=" + replica);
}
@@ -272,6 +303,7 @@ class BlockSender implements java.io.Clo
bytesPerChecksum*maxChunks);
int numChunks = (len + bytesPerChecksum - 1)/bytesPerChecksum;
int packetLen = len + numChunks*checksumSize + 4;
+ boolean lastDataPacket = offset + len == endOffset && len > 0;
pkt.clear();
// write packet header
@@ -304,6 +336,16 @@ class BlockSender implements java.io.Clo
throw e;
}
}
+
+ // write in progress that we need to use to get last checksum
+ if (lastDataPacket && lastChunkChecksum != null) {
+ int start = checksumOff + checksumLen - checksumSize;
+ byte[] updatedChecksum = lastChunkChecksum.getChecksum();
+
+ if (updatedChecksum != null) {
+ System.arraycopy(updatedChecksum, 0, buf, start, checksumSize);
+ }
+ }
}
int dataOff = checksumOff + checksumLen;
Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInPipeline.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInPipeline.java?rev=959330&r1=959329&r2=959330&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInPipeline.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInPipeline.java Wed Jun 30 14:30:23 2010
@@ -41,6 +41,7 @@ class ReplicaInPipeline extends ReplicaI
implements ReplicaInPipelineInterface {
private long bytesAcked;
private long bytesOnDisk;
+ private byte[] lastChecksum;
private Thread writer;
/**
@@ -122,11 +123,17 @@ class ReplicaInPipeline extends ReplicaI
return bytesOnDisk;
}
- @Override //ReplicaInPipelineInterface
- public void setBytesOnDisk(long bytesOnDisk) {
- this.bytesOnDisk = bytesOnDisk;
+ @Override // ReplicaInPipelineInterface
+ public synchronized void setLastChecksumAndDataLen(long dataLength, byte[] lastChecksum) {
+ this.bytesOnDisk = dataLength;
+ this.lastChecksum = lastChecksum;
}
+ @Override // ReplicaInPipelineInterface
+ public synchronized ChunkChecksum getLastChecksumAndDataLen() {
+ return new ChunkChecksum(getBytesOnDisk(), lastChecksum);
+ }
+
/**
* Set the thread that is writing to this replica
* @param writer a thread writing to this replica
Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInPipelineInterface.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInPipelineInterface.java?rev=959330&r1=959329&r2=959330&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInPipelineInterface.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInPipelineInterface.java Wed Jun 30 14:30:23 2010
@@ -44,10 +44,17 @@ interface ReplicaInPipelineInterface ext
void setBytesAcked(long bytesAcked);
/**
- * Set the number of bytes on disk
- * @param bytesOnDisk number of bytes on disk
+ * store the checksum for the last chunk along with the data length
+ * @param dataLength number of bytes on disk
+ * @param lastChecksum - checksum bytes for the last chunk
*/
- void setBytesOnDisk(long bytesOnDisk);
+ public void setLastChecksumAndDataLen(long dataLength, byte[] lastChecksum);
+
+ /**
+ * gets the last chunk checksum and the length of the block corresponding
+ * to that checksum
+ */
+ public ChunkChecksum getLastChecksumAndDataLen();
/**
* Create output streams for writing to this replica,
Propchange: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun 30 14:30:23 2010
@@ -4,4 +4,4 @@
/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:776175-785643,785929-786278
/hadoop/hdfs/branches/HDFS-265/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:796829-820463
/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:820487
-/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:947194,950323,952861
+/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:947194,950323,952861,959324
Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaNotFoundException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaNotFoundException.java?rev=959330&r1=959329&r2=959330&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaNotFoundException.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaNotFoundException.java Wed Jun 30 14:30:23 2010
@@ -27,7 +27,7 @@ import org.apache.hadoop.hdfs.protocol.B
* Exception indicating that DataNode does not have a replica
* that matches the target block.
*/
-class ReplicaNotFoundException extends IOException {
+public class ReplicaNotFoundException extends IOException {
private static final long serialVersionUID = 1L;
final static String NON_RBW_REPLICA = "Cannot recover a non-RBW replica ";
final static String UNFINALIZED_REPLICA =
Propchange: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun 30 14:30:23 2010
@@ -2,4 +2,4 @@
/hadoop/core/trunk/src/test/hdfs:776175-785643
/hadoop/hdfs/branches/HDFS-265/src/test/hdfs:796829-820463
/hadoop/hdfs/branches/branch-0.21/src/test/hdfs:820487
-/hadoop/hdfs/trunk/src/test/hdfs:947194,950323,952861
+/hadoop/hdfs/trunk/src/test/hdfs:947194,950323,952861,959324
Modified: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/DFSTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/DFSTestUtil.java?rev=959330&r1=959329&r2=959330&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/DFSTestUtil.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/DFSTestUtil.java Wed Jun 30 14:30:23 2010
@@ -359,4 +359,14 @@ public class DFSTestUtil {
}
});
}
+
+ public static byte[] generateSequentialBytes(int start, int length) {
+ byte[] result = new byte[length];
+
+ for (int i = 0; i < length; i++) {
+ result[i] = (byte) ((start + i) % 127);
+ }
+
+ return result;
+ }
}
Modified: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java?rev=959330&r1=959329&r2=959330&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java Wed Jun 30 14:30:23 2010
@@ -249,10 +249,13 @@ public class SimulatedFSDataset impleme
}
@Override
- synchronized public void setBytesOnDisk(long bytesOnDisk) {
- if (!finalized) {
- oStream.setLength(bytesOnDisk);
- }
+ public void setLastChecksumAndDataLen(long dataLength, byte[] lastChecksum) {
+ oStream.setLength(dataLength);
+ }
+
+ @Override
+ public ChunkChecksum getLastChecksumAndDataLen() {
+ return new ChunkChecksum(oStream.getLength(), null);
}
}
Propchange: hadoop/hdfs/branches/branch-0.21/src/webapps/datanode/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun 30 14:30:23 2010
@@ -2,4 +2,4 @@
/hadoop/core/trunk/src/webapps/datanode:776175-784663
/hadoop/hdfs/branches/HDFS-265/src/webapps/datanode:796829-820463
/hadoop/hdfs/branches/branch-0.21/src/webapps/datanode:820487
-/hadoop/hdfs/trunk/src/webapps/datanode:947194,950323,952861
+/hadoop/hdfs/trunk/src/webapps/datanode:947194,950323,952861,959324
Propchange: hadoop/hdfs/branches/branch-0.21/src/webapps/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun 30 14:30:23 2010
@@ -2,4 +2,4 @@
/hadoop/core/trunk/src/webapps/hdfs:776175-784663
/hadoop/hdfs/branches/HDFS-265/src/webapps/hdfs:796829-820463
/hadoop/hdfs/branches/branch-0.21/src/webapps/hdfs:820487
-/hadoop/hdfs/trunk/src/webapps/hdfs:947194,950323,952861
+/hadoop/hdfs/trunk/src/webapps/hdfs:947194,950323,952861,959324
Propchange: hadoop/hdfs/branches/branch-0.21/src/webapps/secondary/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jun 30 14:30:23 2010
@@ -2,4 +2,4 @@
/hadoop/core/trunk/src/webapps/secondary:776175-784663
/hadoop/hdfs/branches/HDFS-265/src/webapps/secondary:796829-820463
/hadoop/hdfs/branches/branch-0.21/src/webapps/secondary:820487
-/hadoop/hdfs/trunk/src/webapps/secondary:947194,950323,952861
+/hadoop/hdfs/trunk/src/webapps/secondary:947194,950323,952861,959324