You are viewing a plain text version of this content. The canonical link was embedded in the original HTML and is not preserved in this plain-text extraction.
Posted to hdfs-commits@hadoop.apache.org by ar...@apache.org on 2013/09/23 20:03:28 UTC
svn commit: r1525661 - in
/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test:
java/org/apache/hadoop/hdfs/TestDFSShell.java resources/testHDFSConf.xml
Author: arp
Date: Mon Sep 23 18:03:27 2013
New Revision: 1525661
URL: http://svn.apache.org/r1525661
Log:
HDFS-5139. Merge r1525659 from trunk to branch-2
Modified:
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java?rev=1525661&r1=1525660&r2=1525661&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java Mon Sep 23 18:03:27 2013
@@ -61,6 +61,8 @@ import static org.junit.Assert.*;
public class TestDFSShell {
private static final Log LOG = LogFactory.getLog(TestDFSShell.class);
private static AtomicInteger counter = new AtomicInteger();
+ private final int SUCCESS = 0;
+ private final int ERROR = 1;
static final String TEST_ROOT_DIR = PathUtils.getTestDirName(TestDFSShell.class);
@@ -1603,9 +1605,6 @@ public class TestDFSShell {
// force Copy Option is -f
@Test (timeout = 30000)
public void testCopyCommandsWithForceOption() throws Exception {
- final int SUCCESS = 0;
- final int ERROR = 1;
-
Configuration conf = new Configuration();
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1)
.format(true).build();
@@ -1666,7 +1665,55 @@ public class TestDFSShell {
}
cluster.shutdown();
}
+ }
+
+ // setrep for file and directory.
+ @Test (timeout = 30000)
+ public void testSetrep() throws Exception {
+
+ Configuration conf = new Configuration();
+ MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1)
+ .format(true).build();
+ FsShell shell = null;
+ FileSystem fs = null;
+
+ final String testdir1 = "/tmp/TestDFSShell-testSetrep-" + counter.getAndIncrement();
+ final String testdir2 = testdir1 + "/nestedDir";
+ final Path hdfsFile1 = new Path(testdir1, "testFileForSetrep");
+ final Path hdfsFile2 = new Path(testdir2, "testFileForSetrep");
+ final Short oldRepFactor = new Short((short) 1);
+ final Short newRepFactor = new Short((short) 3);
+ try {
+ String[] argv;
+ cluster.waitActive();
+ fs = cluster.getFileSystem();
+ assertThat(fs.mkdirs(new Path(testdir2)), is(true));
+ shell = new FsShell(conf);
+
+ fs.create(hdfsFile1, true).close();
+ fs.create(hdfsFile2, true).close();
+
+ // Tests for setrep on a file.
+ argv = new String[] { "-setrep", newRepFactor.toString(), hdfsFile1.toString() };
+ assertThat(shell.run(argv), is(SUCCESS));
+ assertThat(fs.getFileStatus(hdfsFile1).getReplication(), is(newRepFactor));
+ assertThat(fs.getFileStatus(hdfsFile2).getReplication(), is(oldRepFactor));
+
+ // Tests for setrep
+
+ // Tests for setrep on a directory and make sure it is applied recursively.
+ argv = new String[] { "-setrep", newRepFactor.toString(), testdir1 };
+ assertThat(shell.run(argv), is(SUCCESS));
+ assertThat(fs.getFileStatus(hdfsFile1).getReplication(), is(newRepFactor));
+ assertThat(fs.getFileStatus(hdfsFile2).getReplication(), is(newRepFactor));
+
+ } finally {
+ if (shell != null) {
+ shell.close();
+ }
+ cluster.shutdown();
+ }
}
/**
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml?rev=1525661&r1=1525660&r2=1525661&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml Mon Sep 23 18:03:27 2013
@@ -6049,7 +6049,7 @@
<command>-fs NAMENODE -mkdir /dir0</command>
<command>-fs NAMENODE -touchz /dir0/file0</command>
<command>-fs NAMENODE -touchz /dir0/file1</command>
- <command>-fs NAMENODE -setrep -R 2 /dir0</command>
+ <command>-fs NAMENODE -setrep 2 /dir0</command>
</test-commands>
<cleanup-commands>
<command>-fs NAMENODE -rm -r /user</command>
@@ -6072,7 +6072,7 @@
<command>-fs NAMENODE -mkdir -p dir0</command>
<command>-fs NAMENODE -touchz dir0/file0</command>
<command>-fs NAMENODE -touchz dir0/file1</command>
- <command>-fs NAMENODE -setrep -R 2 dir0</command>
+ <command>-fs NAMENODE -setrep 2 dir0</command>
</test-commands>
<cleanup-commands>
<command>-fs NAMENODE -rm -r /user</command>
@@ -6090,6 +6090,24 @@
</test>
<test> <!-- TESTED -->
+ <description>setrep: -R ignored for existing file</description>
+ <test-commands>
+ <command>-fs NAMENODE -mkdir -p dir0</command>
+ <command>-fs NAMENODE -touchz dir0/file0</command>
+ <command>-fs NAMENODE -setrep -R 2 dir0/file0</command>
+ </test-commands>
+ <cleanup-commands>
+ <command>-fs NAMENODE -rm -r /user</command>
+ </cleanup-commands>
+ <comparators>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^Replication 2 set: dir0/file0</expected-output>
+ </comparator>
+ </comparators>
+ </test>
+
+ <test> <!-- TESTED -->
<description>setrep: non existent file (absolute path)</description>
<test-commands>
<command>-fs NAMENODE -setrep 2 /dir0/file</command>
@@ -6145,7 +6163,7 @@
<command>-fs NAMENODE -mkdir hdfs:///dir0/</command>
<command>-fs NAMENODE -touchz hdfs:///dir0/file0</command>
<command>-fs NAMENODE -touchz hdfs:///dir0/file1</command>
- <command>-fs NAMENODE -setrep -R 2 hdfs:///dir0</command>
+ <command>-fs NAMENODE -setrep 2 hdfs:///dir0</command>
</test-commands>
<cleanup-commands>
<command>-fs NAMENODE -rm -r hdfs:///*</command>
@@ -6203,7 +6221,7 @@
<command>-fs NAMENODE -mkdir -p NAMENODE/dir0</command>
<command>-fs NAMENODE -touchz NAMENODE/dir0/file0</command>
<command>-fs NAMENODE -touchz NAMENODE/dir0/file1</command>
- <command>-fs NAMENODE -setrep -R 2 NAMENODE/dir0</command>
+ <command>-fs NAMENODE -setrep 2 NAMENODE/dir0</command>
</test-commands>
<cleanup-commands>
<command>-fs NAMENODE -rm -r NAMENODE/*</command>