You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by cd...@apache.org on 2008/12/05 03:12:17 UTC
svn commit: r723559 - in /hadoop/core/trunk: ./
src/core/org/apache/hadoop/fs/ src/hdfs/org/apache/hadoop/hdfs/
src/test/org/apache/hadoop/fs/ src/test/org/apache/hadoop/hdfs/
Author: cdouglas
Date: Thu Dec 4 18:12:16 2008
New Revision: 723559
URL: http://svn.apache.org/viewvc?rev=723559&view=rev
Log:
Revert HADOOP-4648.
Added:
hadoop/core/trunk/src/core/org/apache/hadoop/fs/InMemoryFileSystem.java
- copied unchanged from r723554, hadoop/core/trunk/src/core/org/apache/hadoop/fs/InMemoryFileSystem.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/ChecksumDistributedFileSystem.java
- copied unchanged from r723554, hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/ChecksumDistributedFileSystem.java
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestChecksumFileSystem.java
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDFSShell.java
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=723559&r1=723558&r2=723559&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Thu Dec 4 18:12:16 2008
@@ -50,10 +50,6 @@
HADOOP-3497. Fix bug in overly restrictive file globbing with a
PathFilter. (tomwhite)
- HADOOP-4648. Remove deprecated ChecksumFileSystems. (cdouglas)
- InMemoryFileSystem is removed.
- ChecksumDistributedFileSystem is removed.
-
NEW FEATURES
HADOOP-4575. Add a proxy service for relaying HsftpFileSystem requests.
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestChecksumFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestChecksumFileSystem.java?rev=723559&r1=723558&r2=723559&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestChecksumFileSystem.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestChecksumFileSystem.java Thu Dec 4 18:12:16 2008
@@ -21,6 +21,7 @@
import java.net.URI;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.InMemoryFileSystem;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.conf.Configuration;
import junit.framework.TestCase;
@@ -38,6 +39,34 @@
ChecksumFileSystem.getChecksumLength(10000000000000L, 10));
}
+ // check that the checksum file is deleted for Checksum file system.
+ public void testDeletionOfCheckSum() throws Exception {
+ Configuration conf = new Configuration();
+ URI uri = URI.create("ramfs://mapoutput" + "_tmp");
+ InMemoryFileSystem inMemFs = (InMemoryFileSystem)FileSystem.get(uri, conf);
+ Path testPath = new Path("/file_1");
+ inMemFs.reserveSpaceWithCheckSum(testPath, 1024);
+ FSDataOutputStream fout = inMemFs.create(testPath);
+ fout.write("testing".getBytes());
+ fout.close();
+ assertTrue("checksum exists", inMemFs.exists(inMemFs.getChecksumFile(testPath)));
+ inMemFs.delete(testPath, true);
+ assertTrue("checksum deleted", !inMemFs.exists(inMemFs.getChecksumFile(testPath)));
+ // check for directories getting deleted.
+ testPath = new Path("/tesdir/file_1");
+ inMemFs.reserveSpaceWithCheckSum(testPath, 1024);
+ fout = inMemFs.create(testPath);
+ fout.write("testing".getBytes());
+ fout.close();
+ testPath = new Path("/testdir/file_2");
+ inMemFs.reserveSpaceWithCheckSum(testPath, 1024);
+ fout = inMemFs.create(testPath);
+ fout.write("testing".getBytes());
+ fout.close();
+ inMemFs.delete(testPath, true);
+ assertTrue("nothing in the namespace", inMemFs.listStatus(new Path("/")).length == 0);
+ }
+
public void testVerifyChecksum() throws Exception {
String TEST_ROOT_DIR
= System.getProperty("test.build.data","build/test/data/work-dir/localfs");
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDFSShell.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDFSShell.java?rev=723559&r1=723558&r2=723559&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDFSShell.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/TestDFSShell.java Thu Dec 4 18:12:16 2008
@@ -581,11 +581,15 @@
public void testCopyToLocal() throws IOException {
Configuration conf = new Configuration();
+ /* This tests some properties of ChecksumFileSystem as well.
+ * Make sure that we create ChecksumDFS */
+ conf.set("fs.hdfs.impl",
+ "org.apache.hadoop.hdfs.ChecksumDistributedFileSystem");
MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
FileSystem fs = cluster.getFileSystem();
assertTrue("Not a HDFS: "+fs.getUri(),
- fs instanceof DistributedFileSystem);
- DistributedFileSystem dfs = (DistributedFileSystem)fs;
+ fs instanceof ChecksumDistributedFileSystem);
+ ChecksumDistributedFileSystem dfs = (ChecksumDistributedFileSystem)fs;
FsShell shell = new FsShell();
shell.setConf(conf);
@@ -868,11 +872,13 @@
Configuration conf = new Configuration();
/* This tests some properties of ChecksumFileSystem as well.
* Make sure that we create ChecksumDFS */
+ conf.set("fs.hdfs.impl",
+ "org.apache.hadoop.hdfs.ChecksumDistributedFileSystem");
MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
FileSystem fs = cluster.getFileSystem();
assertTrue("Not a HDFS: "+fs.getUri(),
- fs instanceof DistributedFileSystem);
- DistributedFileSystem fileSys = (DistributedFileSystem)fs;
+ fs instanceof ChecksumDistributedFileSystem);
+ ChecksumDistributedFileSystem fileSys = (ChecksumDistributedFileSystem)fs;
FsShell shell = new FsShell();
shell.setConf(conf);
@@ -931,6 +937,56 @@
}
fileSys.delete(myFile2, true);
+ // Verify that we can get with and without crc
+ {
+ File testFile = new File(TEST_ROOT_DIR, "mkdirs/myFile");
+ File checksumFile = new File(fileSys.getChecksumFile(
+ new Path(testFile.getAbsolutePath())).toString());
+ testFile.delete();
+ checksumFile.delete();
+
+ String[] args = new String[3];
+ args[0] = "-get";
+ args[1] = "/test/mkdirs";
+ args[2] = TEST_ROOT_DIR;
+ int val = -1;
+ try {
+ val = shell.run(args);
+ } catch (Exception e) {
+ System.err.println("Exception raised from DFSShell.run " +
+ e.getLocalizedMessage());
+ }
+ assertTrue(val == 0);
+ assertTrue("Copying failed.", testFile.exists());
+ assertTrue("Checksum file " + checksumFile+" is copied.", !checksumFile.exists());
+ testFile.delete();
+ }
+ {
+ File testFile = new File(TEST_ROOT_DIR, "mkdirs/myFile");
+ File checksumFile = new File(fileSys.getChecksumFile(
+ new Path(testFile.getAbsolutePath())).toString());
+ testFile.delete();
+ checksumFile.delete();
+
+ String[] args = new String[4];
+ args[0] = "-get";
+ args[1] = "-crc";
+ args[2] = "/test/mkdirs";
+ args[3] = TEST_ROOT_DIR;
+ int val = -1;
+ try {
+ val = shell.run(args);
+ } catch (Exception e) {
+ System.err.println("Exception raised from DFSShell.run " +
+ e.getLocalizedMessage());
+ }
+ assertTrue(val == 0);
+
+ assertTrue("Copying data file failed.", testFile.exists());
+ assertTrue("Checksum file " + checksumFile+" not copied.", checksumFile.exists());
+ testFile.delete();
+ checksumFile.delete();
+ }
// Verify that we get an error while trying to read a nonexistent file
{
String[] args = new String[2];