You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by dh...@apache.org on 2008/03/20 18:13:13 UTC
svn commit: r639362 - in /hadoop/core/trunk: CHANGES.txt
src/java/org/apache/hadoop/fs/ChecksumFileSystem.java
src/java/org/apache/hadoop/fs/FilterFileSystem.java
src/test/org/apache/hadoop/fs/TestChecksumFileSystem.java
Author: dhruba
Date: Thu Mar 20 10:13:11 2008
New Revision: 639362
URL: http://svn.apache.org/viewvc?rev=639362&view=rev
Log:
HADOOP-3025. ChecksumFileSystem supports the delete method with
the recursive flag. (Mahadev Konar via dhruba)
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java
hadoop/core/trunk/src/java/org/apache/hadoop/fs/FilterFileSystem.java
hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestChecksumFileSystem.java
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=639362&r1=639361&r2=639362&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Thu Mar 20 10:13:11 2008
@@ -316,6 +316,8 @@
HADOOP-2936. Fixes HOD in a way that it generates hdfs://host:port on the
client side configs. (Vinod Kumar Vavilapalli via ddas)
+ HADOOP-3025. ChecksumFileSystem supports the delete method with
+ the recursive flag. (Mahadev Konar via dhruba)
Release 0.16.2 - Unreleased
Modified: hadoop/core/trunk/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java?rev=639362&r1=639361&r2=639362&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java Thu Mar 20 10:13:11 2008
@@ -409,21 +409,30 @@
}
/**
- * Get rid of Path f, whether a true file or dir.
+ * Implement the delete(Path, boolean) in checksum
+ * file system.
*/
- public boolean delete(Path f) throws IOException {
- if (fs.isDirectory(f)) {
- return fs.delete(f, true);
+ public boolean delete(Path f, boolean recursive) throws IOException{
+ FileStatus fstatus = null;
+ try {
+ fstatus = fs.getFileStatus(f);
+ } catch(FileNotFoundException e) {
+ return false;
+ }
+ if(fstatus.isDir()) {
+ //this works since the crcs are in the same
+ //directories and the files. so we just delete
+ //everything in the underlying filesystem
+ return fs.delete(f, recursive);
} else {
Path checkFile = getChecksumFile(f);
if (fs.exists(checkFile)) {
fs.delete(checkFile, true);
}
-
return fs.delete(f, true);
}
}
-
+
final private static PathFilter DEFAULT_FILTER = new PathFilter() {
public boolean accept(Path file) {
return !isChecksumFile(file);
Modified: hadoop/core/trunk/src/java/org/apache/hadoop/fs/FilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/fs/FilterFileSystem.java?rev=639362&r1=639361&r2=639362&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/fs/FilterFileSystem.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/fs/FilterFileSystem.java Thu Mar 20 10:13:11 2008
@@ -137,7 +137,7 @@
/** Delete a file */@Deprecated
public boolean delete(Path f) throws IOException {
- return fs.delete(f);
+ return delete(f, true);
}
/** Delete a file */
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestChecksumFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestChecksumFileSystem.java?rev=639362&r1=639361&r2=639362&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestChecksumFileSystem.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestChecksumFileSystem.java Thu Mar 20 10:13:11 2008
@@ -1,35 +1,69 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs;
-
-import junit.framework.TestCase;
-
-public class TestChecksumFileSystem extends TestCase {
- public void testgetChecksumLength() throws Exception {
- assertEquals(8, ChecksumFileSystem.getChecksumLength(0L, 512));
- assertEquals(12, ChecksumFileSystem.getChecksumLength(1L, 512));
- assertEquals(12, ChecksumFileSystem.getChecksumLength(512L, 512));
- assertEquals(16, ChecksumFileSystem.getChecksumLength(513L, 512));
- assertEquals(16, ChecksumFileSystem.getChecksumLength(1023L, 512));
- assertEquals(16, ChecksumFileSystem.getChecksumLength(1024L, 512));
- assertEquals(408, ChecksumFileSystem.getChecksumLength(100L, 1));
- assertEquals(4000000000008L,
- ChecksumFileSystem.getChecksumLength(10000000000000L, 10));
- }
-}
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs;
+
+import java.net.URI;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.InMemoryFileSystem;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.conf.Configuration;
+import junit.framework.TestCase;
+
+public class TestChecksumFileSystem extends TestCase {
+ public void testgetChecksumLength() throws Exception {
+ assertEquals(8, ChecksumFileSystem.getChecksumLength(0L, 512));
+ assertEquals(12, ChecksumFileSystem.getChecksumLength(1L, 512));
+ assertEquals(12, ChecksumFileSystem.getChecksumLength(512L, 512));
+ assertEquals(16, ChecksumFileSystem.getChecksumLength(513L, 512));
+ assertEquals(16, ChecksumFileSystem.getChecksumLength(1023L, 512));
+ assertEquals(16, ChecksumFileSystem.getChecksumLength(1024L, 512));
+ assertEquals(408, ChecksumFileSystem.getChecksumLength(100L, 1));
+ assertEquals(4000000000008L,
+ ChecksumFileSystem.getChecksumLength(10000000000000L, 10));
+ }
+
+ // check that the checksum file is deleted for Checksum file system.
+ public void testDeletionOfCheckSum() throws Exception {
+ Configuration conf = new Configuration();
+ URI uri = URI.create("ramfs://mapoutput" + "_tmp");
+ InMemoryFileSystem inMemFs = (InMemoryFileSystem)FileSystem.get(uri, conf);
+ Path testPath = new Path("/file_1");
+ inMemFs.reserveSpaceWithCheckSum(testPath, 1024);
+ FSDataOutputStream fout = inMemFs.create(testPath);
+ fout.write("testing".getBytes());
+ fout.close();
+ assertTrue("checksum exists", inMemFs.exists(inMemFs.getChecksumFile(testPath)));
+ inMemFs.delete(testPath, true);
+ assertTrue("checksum deleted", !inMemFs.exists(inMemFs.getChecksumFile(testPath)));
+ // check for directories getting deleted.
+ testPath = new Path("/testdir/file_1");
+ inMemFs.reserveSpaceWithCheckSum(testPath, 1024);
+ fout = inMemFs.create(testPath);
+ fout.write("testing".getBytes());
+ fout.close();
+ testPath = new Path("/testdir/file_2");
+ inMemFs.reserveSpaceWithCheckSum(testPath, 1024);
+ fout = inMemFs.create(testPath);
+ fout.write("testing".getBytes());
+ fout.close();
+ inMemFs.delete(testPath, true);
+ assertTrue("nothing in the namespace", inMemFs.listStatus(new Path("/")).length == 0);
+ }
+}