Posted to hdfs-commits@hadoop.apache.org by dh...@apache.org on 2009/08/13 20:11:18 UTC

svn commit: r803973 - in /hadoop/hdfs/trunk: CHANGES.txt src/java/org/apache/hadoop/hdfs/BlockMissingException.java src/java/org/apache/hadoop/hdfs/DFSClient.java src/test/hdfs/org/apache/hadoop/hdfs/TestBlockMissingException.java

Author: dhruba
Date: Thu Aug 13 18:11:18 2009
New Revision: 803973

URL: http://svn.apache.org/viewvc?rev=803973&view=rev
Log:
HDFS-532. Allow applications to know that a read request failed
because a block is missing. (dhruba)
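
A minimal sketch of how an application can make use of this change (the
helper method readAndReport and its arguments are illustrative, not part
of this commit):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.BlockMissingException;

    // Hypothetical helper: read a file and react specifically when a
    // block has no locations, instead of treating the failure as a
    // generic IOException.
    void readAndReport(Configuration conf, Path path) throws IOException {
      FileSystem fs = FileSystem.get(conf);
      FSDataInputStream in = fs.open(path);
      try {
        byte[] buf = new byte[4096];
        while (in.read(buf) >= 0) {
          // process buf
        }
      } catch (BlockMissingException e) {
        // The read failed because a block of the file is missing.
        System.err.println("missing block in " + e.getFile() +
                           " at offset " + e.getOffset());
        throw e;
      } finally {
        in.close();
      }
    }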


Added:
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockMissingException.java
    hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestBlockMissingException.java
Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=803973&r1=803972&r2=803973&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Thu Aug 13 18:11:18 2009
@@ -150,6 +150,9 @@
 
     HDFS-534. Include avro in ivy.  (szetszwo)
 
+    HDFS-532. Allow applications to know that a read request failed
+    because a block is missing. (dhruba)
+
 Release 0.20.1 - Unreleased
 
   IMPROVEMENTS

Added: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockMissingException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockMissingException.java?rev=803973&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockMissingException.java (added)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/BlockMissingException.java Thu Aug 13 18:11:18 2009
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs;
+
+import java.io.IOException;
+
+/**
+ * This exception is thrown when a read encounters a block that has no
+ * locations associated with it.
+ */
+public class BlockMissingException extends IOException {
+
+  private static final long serialVersionUID = 1L;
+
+  private String filename;
+  private long   offset;
+
+  /**
+   * An exception that indicates that the file is corrupted.
+   * @param filename name of the corrupted file
+   * @param description a description of the corruption details
+   * @param offset offset in the file at which the missing block starts
+   */
+  public BlockMissingException(String filename, String description, long offset) {
+    super(description);
+    this.filename = filename;
+    this.offset = offset;
+  }
+
+  /**
+   * Returns the name of the corrupted file.
+   * @return name of corrupted file
+   */
+  public String getFile() {
+    return filename;
+  }
+
+  /**
+   * Returns the offset in the file at which the corruption was encountered.
+   * @return offset at which the file is corrupted
+   */
+  public long getOffset() {
+    return offset;
+  }
+}

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java?rev=803973&r1=803972&r2=803973&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java Thu Aug 13 18:11:18 2009
@@ -1906,7 +1906,8 @@
         } catch (IOException ie) {
           String blockInfo = block.getBlock() + " file=" + src;
           if (failures >= maxBlockAcquireFailures) {
-            throw new IOException("Could not obtain block: " + blockInfo);
+            throw new BlockMissingException(src, "Could not obtain block: " + blockInfo,
+                                            block.getStartOffset());
           }
           
           if (nodes == null || nodes.length == 0) {
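
Note that BlockMissingException extends IOException, so existing callers
that catch IOException continue to work unchanged; callers that want the
finer-grained signal can catch BlockMissingException before the general
IOException.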

Added: hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestBlockMissingException.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestBlockMissingException.java?rev=803973&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestBlockMissingException.java (added)
+++ hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestBlockMissingException.java Thu Aug 13 18:11:18 2009
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Properties;
+
+import junit.framework.TestCase;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.BlockMissingException;
+
+public class TestBlockMissingException extends TestCase {
+  final static Log LOG = LogFactory.getLog("org.apache.hadoop.hdfs.TestBlockMissingException");
+  final static int NUM_DATANODES = 3;
+
+  Configuration conf;
+  MiniDFSCluster dfs = null;
+  DistributedFileSystem fileSys = null;
+
+  /**
+   * Test that reading a file with a missing block throws BlockMissingException.
+   */
+  public void testBlockMissingException() throws Exception {
+    LOG.info("Test testBlockMissingException started.");
+    long blockSize = 1024L;
+    int numBlocks = 4;
+    conf = new Configuration();
+    try {
+      dfs = new MiniDFSCluster(conf, NUM_DATANODES, true, null);
+      dfs.waitActive();
+      fileSys = (DistributedFileSystem)dfs.getFileSystem();
+      Path file1 = new Path("/user/dhruba/raidtest/file1");
+      createOldFile(fileSys, file1, 1, numBlocks, blockSize);
+
+      // Extract the block locations of the file from the namenode.
+      LocatedBlocks locations =
+          fileSys.dfs.getNamenode().getBlockLocations(file1.toString(),
+                                                      0, numBlocks * blockSize);
+      // Remove the first block of the file.
+      LOG.info("Remove first block of file");
+      corruptBlock(file1, locations.get(0).getBlock());
+
+      // validate that the system throws BlockMissingException
+      validateFile(fileSys, file1);
+    } finally {
+      if (fileSys != null) fileSys.close();
+      if (dfs != null) dfs.shutdown();
+    }
+    LOG.info("Test testBlockMissingException completed.");
+  }
+  
+  //
+  // Creates a file and populates it with data.
+  //
+  private void createOldFile(FileSystem fileSys, Path name, int repl, int numBlocks, long blocksize)
+    throws IOException {
+    FSDataOutputStream stm = fileSys.create(name, true,
+                                            fileSys.getConf().getInt("io.file.buffer.size", 4096),
+                                            (short)repl, blocksize);
+    // fill data into file
+    final byte[] b = new byte[(int)blocksize];
+    for (int i = 0; i < numBlocks; i++) {
+      stm.write(b);
+    }
+    stm.close();
+  }
+
+  //
+  // Validates that reading the file throws BlockMissingException.
+  //
+  private void validateFile(FileSystem fileSys, Path name)
+    throws IOException {
+
+    FSDataInputStream stm = fileSys.open(name);
+    final byte[] b = new byte[4192];
+    int num = 0;
+    boolean gotException = false;
+
+    try {
+      while (num >= 0) {
+        num = stm.read(b);
+        if (num < 0) {
+          break;
+        }
+      }
+    } catch (BlockMissingException e) {
+      gotException = true;
+    }
+    stm.close();
+    assertTrue("Expected BlockMissingException ", gotException);
+  }
+
+  /*
+   * Returns the data directories ("current" dirs) for datanode i.
+   */
+  private File[] getDataNodeDirs(int i) throws IOException {
+    File base_dir = new File(System.getProperty("test.build.data"), "dfs/");
+    File data_dir = new File(base_dir, "data");
+    File dir1 = new File(data_dir, "data"+(2*i+1));
+    File dir2 = new File(data_dir, "data"+(2*i+2));
+    if (dir1.isDirectory() && dir2.isDirectory()) {
+      File[] dir = new File[2];
+      dir[0] = new File(dir1, "current");
+      dir[1] = new File(dir2, "current"); 
+      return dir;
+    }
+    return new File[0];
+  }
+
+  //
+  // Corrupt the specified block of the file by deleting its replicas.
+  //
+  void corruptBlock(Path file, Block blockNum) throws IOException {
+    long id = blockNum.getBlockId();
+
+    // Now deliberately remove the block's replica files from the
+    // datanodes' data directories.
+    for (int i = 0; i < NUM_DATANODES; i++) {
+      File[] dirs = getDataNodeDirs(i);
+      
+      for (int j = 0; j < dirs.length; j++) {
+        File[] blocks = dirs[j].listFiles();
+        assertTrue("Blocks do not exist in data-dir", (blocks != null) && (blocks.length >= 0));
+        for (int idx = 0; idx < blocks.length; idx++) {
+          if (blocks[idx].getName().startsWith("blk_" + id) &&
+              !blocks[idx].getName().endsWith(".meta")) {
+            blocks[idx].delete();
+            LOG.info("Deleted block " + blocks[idx]);
+          }
+        }
+      }
+    }
+  }
+
+}