You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2008/09/27 18:17:52 UTC
svn commit: r699679 - in /hadoop/core/branches/branch-0.18: CHANGES.txt
src/hdfs/org/apache/hadoop/dfs/FSDataset.java
src/test/org/apache/hadoop/dfs/TestFileCreation.java
Author: szetszwo
Date: Sat Sep 27 09:17:52 2008
New Revision: 699679
URL: http://svn.apache.org/viewvc?rev=699679&view=rev
Log:
HADOOP-3614. Fix a bug where the Datanode may use an old GenerationStamp to get the meta file. (szetszwo)
Modified:
hadoop/core/branches/branch-0.18/CHANGES.txt
hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/FSDataset.java
hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestFileCreation.java
Modified: hadoop/core/branches/branch-0.18/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/CHANGES.txt?rev=699679&r1=699678&r2=699679&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/CHANGES.txt (original)
+++ hadoop/core/branches/branch-0.18/CHANGES.txt Sat Sep 27 09:17:52 2008
@@ -6,6 +6,9 @@
HADOOP-4116. Balancer should provide better resource management. (hairong)
+ HADOOP-3614. Fix a bug that Datanode may use an old GenerationStamp to get
+ meta file. (szetszwo)
+
NEW FEATURES
HADOOP-2421. Add jdiff output to documentation, listing all API
Modified: hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/FSDataset.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/FSDataset.java?rev=699679&r1=699678&r2=699679&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/FSDataset.java (original)
+++ hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/FSDataset.java Sat Sep 27 09:17:52 2008
@@ -606,9 +606,10 @@
}
}
- File findBlockFile(Block b) {
- assert b.generationStamp == GenerationStamp.WILDCARD_STAMP;
-
+ /** Return the block file for the given ID */
+ public File findBlockFile(long blockId) {
+ final Block b = new Block(blockId);
+
File blockfile = null;
ActiveFile activefile = ongoingCreates.get(b);
if (activefile != null) {
@@ -627,16 +628,14 @@
}
/** {@inheritDoc} */
- public Block getStoredBlock(long blkid) throws IOException {
- Block b = new Block(blkid);
- File blockfile = findBlockFile(b);
+ public synchronized Block getStoredBlock(long blkid) throws IOException {
+ File blockfile = findBlockFile(blkid);
if (blockfile == null) {
return null;
}
File metafile = findMetaFile(blockfile);
- b.generationStamp = parseGenerationStamp(blockfile, metafile);
- b.len = blockfile.length();
- return b;
+ return new Block(blkid, blockfile.length(),
+ parseGenerationStamp(blockfile, metafile));
}
public boolean metaFileExists(Block b) throws IOException {
@@ -791,10 +790,13 @@
+ ") to newblock (=" + newblock + ").");
}
- File blockFile = findBlockFile(oldblock);
+ File blockFile = findBlockFile(oldblock.getBlockId());
+ if (blockFile == null) {
+ throw new IOException("Block " + oldblock + " does not exist.");
+ }
interruptOngoingCreates(oldblock);
-
- File oldMetaFile = getMetaFile(blockFile, oldblock);
+
+ File oldMetaFile = findMetaFile(blockFile);
long oldgs = parseGenerationStamp(blockFile, oldMetaFile);
//rename meta file to a tmp file
Modified: hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestFileCreation.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestFileCreation.java?rev=699679&r1=699678&r2=699679&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestFileCreation.java (original)
+++ hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestFileCreation.java Sat Sep 27 09:17:52 2008
@@ -17,15 +17,21 @@
*/
package org.apache.hadoop.dfs;
-import java.io.*;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Random;
+import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
-
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.log4j.Level;
@@ -668,7 +674,7 @@
DataNode datanode = cluster.getDataNode(datanodeinfo.ipcPort);
FSDataset dataset = (FSDataset)datanode.data;
Block b = dataset.getStoredBlock(locatedblock.getBlock().blkid);
- File blockfile = dataset.findBlockFile(b);
+ File blockfile = dataset.findBlockFile(b.getBlockId());
System.out.println("blockfile=" + blockfile);
if (blockfile != null) {
BufferedReader in = new BufferedReader(new FileReader(blockfile));