Posted to common-commits@hadoop.apache.org by ac...@apache.org on 2012/11/28 16:17:29 UTC

svn commit: r1414750 - in /hadoop/common/branches/MR-2454: ./ hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/ hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/ hadoop-tools/hadoop-extras/src/test/java/org/apache/hado...

Author: acmurthy
Date: Wed Nov 28 15:17:21 2012
New Revision: 1414750

URL: http://svn.apache.org/viewvc?rev=1414750&view=rev
Log:
Rebased on trunk.

Added:
    hadoop/common/branches/MR-2454/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestExternalCall.java
      - copied unchanged from r1414746, hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestExternalCall.java
    hadoop/common/branches/MR-2454/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestLogalyzer.java
      - copied unchanged from r1414746, hadoop/common/trunk/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestLogalyzer.java
Modified:
    hadoop/common/branches/MR-2454/   (props changed)
    hadoop/common/branches/MR-2454/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
    hadoop/common/branches/MR-2454/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java
    hadoop/common/branches/MR-2454/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
    hadoop/common/branches/MR-2454/pom.xml

Propchange: hadoop/common/branches/MR-2454/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk:r1411704-1414746

Modified: hadoop/common/branches/MR-2454/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2454/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java?rev=1414750&r1=1414749&r2=1414750&view=diff
==============================================================================
--- hadoop/common/branches/MR-2454/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java (original)
+++ hadoop/common/branches/MR-2454/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java Wed Nov 28 15:17:21 2012
@@ -27,8 +27,6 @@ import java.util.Collections;
 import java.util.List;
 import java.util.StringTokenizer;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
@@ -38,111 +36,117 @@ import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.util.JarFinder;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;
 import org.apache.log4j.Level;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
 
 /**
  * test {@link HadoopArchives}
  */
-public class TestHadoopArchives extends TestCase {
+public class TestHadoopArchives {
 
-  public static final String HADOOP_ARCHIVES_JAR = JarFinder.getJar(HadoopArchives.class);
+  public static final String HADOOP_ARCHIVES_JAR = JarFinder
+      .getJar(HadoopArchives.class);
 
   {
-    ((Log4JLogger)LogFactory.getLog(org.apache.hadoop.security.Groups.class)
-        ).getLogger().setLevel(Level.ERROR);
-    ((Log4JLogger)org.apache.hadoop.ipc.Server.LOG
-        ).getLogger().setLevel(Level.ERROR);
-    ((Log4JLogger)org.apache.hadoop.util.AsyncDiskService.LOG
-        ).getLogger().setLevel(Level.ERROR);
+    ((Log4JLogger) LogFactory.getLog(org.apache.hadoop.security.Groups.class))
+        .getLogger().setLevel(Level.ERROR);
   }
 
   private static final String inputDir = "input";
 
   private Path inputPath;
   private MiniDFSCluster dfscluster;
-  private MiniMRCluster mapred;
+
+  private Configuration conf;
   private FileSystem fs;
   private Path archivePath;
-  
-  static private Path createFile(Path dir, String filename, FileSystem fs
-      ) throws IOException {
+
+  static private Path createFile(Path dir, String filename, FileSystem fs)
+      throws IOException {
     final Path f = new Path(dir, filename);
-    final FSDataOutputStream out = fs.create(f); 
+    final FSDataOutputStream out = fs.create(f);
     out.write(filename.getBytes());
     out.close();
     return f;
   }
-  
-  protected void setUp() throws Exception {
-    super.setUp();
-    dfscluster = new MiniDFSCluster(new Configuration(), 2, true, null);
+
+  @Before
+  public void setUp() throws Exception {
+    conf = new Configuration();
+    conf.set(CapacitySchedulerConfiguration.PREFIX
+        + CapacitySchedulerConfiguration.ROOT + "."
+        + CapacitySchedulerConfiguration.QUEUES, "default");
+    conf.set(CapacitySchedulerConfiguration.PREFIX
+        + CapacitySchedulerConfiguration.ROOT + ".default."
+        + CapacitySchedulerConfiguration.CAPACITY, "100");
+    dfscluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).format(true)
+        .build();
+
     fs = dfscluster.getFileSystem();
-    mapred = new MiniMRCluster(2, fs.getUri().toString(), 1);
-    inputPath = new Path(fs.getHomeDirectory(), inputDir); 
+    inputPath = new Path(fs.getHomeDirectory(), inputDir);
     archivePath = new Path(fs.getHomeDirectory(), "archive");
     fs.mkdirs(inputPath);
     createFile(inputPath, "a", fs);
     createFile(inputPath, "b", fs);
     createFile(inputPath, "c", fs);
   }
-  
-  protected void tearDown() throws Exception {
+
+  @After
+  public void tearDown() throws Exception {
     try {
-      if (mapred != null) {
-        mapred.shutdown();
-      }
       if (dfscluster != null) {
         dfscluster.shutdown();
       }
-    } catch(Exception e) {
+    } catch (Exception e) {
       System.err.println(e);
     }
-    super.tearDown();
   }
-  
-   
+
+  @Test
   public void testRelativePath() throws Exception {
     fs.delete(archivePath, true);
 
     final Path sub1 = new Path(inputPath, "dir1");
     fs.mkdirs(sub1);
     createFile(sub1, "a", fs);
-    final Configuration conf = mapred.createJobConf();
     final FsShell shell = new FsShell(conf);
 
     final List<String> originalPaths = lsr(shell, "input");
     System.out.println("originalPath: " + originalPaths);
     final URI uri = fs.getUri();
-    final String prefix = "har://hdfs-" + uri.getHost() +":" + uri.getPort()
+    final String prefix = "har://hdfs-" + uri.getHost() + ":" + uri.getPort()
         + archivePath.toUri().getPath() + Path.SEPARATOR;
 
     {
       final String harName = "foo.har";
-      final String[] args = {
-          "-archiveName",
-          harName,
-          "-p",
-          "input",
-          "*",
-          "archive"
-      };
-      System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH, HADOOP_ARCHIVES_JAR);
-      final HadoopArchives har = new HadoopArchives(mapred.createJobConf());
-      assertEquals(0, ToolRunner.run(har, args));
+      final String[] args = { "-archiveName", harName, "-p", "input", "*",
+          "archive" };
+      System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH,
+          HADOOP_ARCHIVES_JAR);
+      final HadoopArchives har = new HadoopArchives(conf);
+      Assert.assertEquals(0, ToolRunner.run(har, args));
 
-      //compare results
+      // compare results
       final List<String> harPaths = lsr(shell, prefix + harName);
-      assertEquals(originalPaths, harPaths);
+      Assert.assertEquals(originalPaths, harPaths);
     }
   }
-
+
+  @Test
   public void testPathWithSpaces() throws Exception {
     fs.delete(archivePath, true);
 
-    //create files/directories with spaces
+    // create files/directories with spaces
     createFile(inputPath, "c c", fs);
     final Path sub1 = new Path(inputPath, "sub 1");
     fs.mkdirs(sub1);
@@ -154,42 +158,36 @@ public class TestHadoopArchives extends 
     final Path sub2 = new Path(inputPath, "sub 1 with suffix");
     fs.mkdirs(sub2);
     createFile(sub2, "z", fs);
-    final Configuration conf = mapred.createJobConf();
+
     final FsShell shell = new FsShell(conf);
 
     final String inputPathStr = inputPath.toUri().getPath();
-    System.out.println("inputPathStr = " + inputPathStr);
 
     final List<String> originalPaths = lsr(shell, inputPathStr);
     final URI uri = fs.getUri();
-    final String prefix = "har://hdfs-" + uri.getHost() +":" + uri.getPort()
+    final String prefix = "har://hdfs-" + uri.getHost() + ":" + uri.getPort()
         + archivePath.toUri().getPath() + Path.SEPARATOR;
 
-    {//Enable space replacement
+    {// Enable space replacement
       final String harName = "foo.har";
-      final String[] args = {
-          "-archiveName",
-          harName,
-          "-p",
-          inputPathStr,
-          "*",
-          archivePath.toString()
-      };
-      System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH, HADOOP_ARCHIVES_JAR);
-      final HadoopArchives har = new HadoopArchives(mapred.createJobConf());
-      assertEquals(0, ToolRunner.run(har, args));
+      final String[] args = { "-archiveName", harName, "-p", inputPathStr, "*",
+          archivePath.toString() };
+      System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH,
+          HADOOP_ARCHIVES_JAR);
+      final HadoopArchives har = new HadoopArchives(conf);
+      Assert.assertEquals(0, ToolRunner.run(har, args));
 
-      //compare results
+      // compare results
       final List<String> harPaths = lsr(shell, prefix + harName);
-      assertEquals(originalPaths, harPaths);
+      Assert.assertEquals(originalPaths, harPaths);
     }
 
   }
 
-  private static List<String> lsr(final FsShell shell, String dir
-      ) throws Exception {
+  private static List<String> lsr(final FsShell shell, String dir)
+      throws Exception {
     System.out.println("lsr root=" + dir);
-    final ByteArrayOutputStream bytes = new ByteArrayOutputStream(); 
+    final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
     final PrintStream out = new PrintStream(bytes);
     final PrintStream oldOut = System.out;
     final PrintStream oldErr = System.err;
@@ -197,7 +195,7 @@ public class TestHadoopArchives extends 
     System.setErr(out);
     final String results;
     try {
-      assertEquals(0, shell.run(new String[]{"-lsr", dir}));
+      Assert.assertEquals(0, shell.run(new String[] { "-lsr", dir }));
       results = bytes.toString();
     } finally {
       IOUtils.closeStream(out);
@@ -206,13 +204,13 @@ public class TestHadoopArchives extends 
     }
     System.out.println("lsr results:\n" + results);
     String dirname = dir;
-    if (dir.lastIndexOf(Path.SEPARATOR) != -1 ) {
+    if (dir.lastIndexOf(Path.SEPARATOR) != -1) {
       dirname = dir.substring(dir.lastIndexOf(Path.SEPARATOR));
     }
 
     final List<String> paths = new ArrayList<String>();
-    for(StringTokenizer t = new StringTokenizer(results, "\n");
-        t.hasMoreTokens(); ) {
+    for (StringTokenizer t = new StringTokenizer(results, "\n"); t
+        .hasMoreTokens();) {
       final String s = t.nextToken();
       final int i = s.indexOf(dirname);
       if (i >= 0) {
@@ -220,7 +218,8 @@ public class TestHadoopArchives extends 
       }
     }
     Collections.sort(paths);
-    System.out.println("lsr paths = " + paths.toString().replace(", ", ",\n  "));
+    System.out
+        .println("lsr paths = " + paths.toString().replace(", ", ",\n  "));
     return paths;
   }
 }
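
A minimal standalone sketch of the setup pattern this file now uses: a
MiniDFSCluster built through the Builder API, with the CapacityScheduler
given a single "default" queue at 100% capacity, as in the @Before method
above. The wrapper class and main() are illustrative assumptions; the
configuration keys and builder calls are the ones the diff itself uses.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.MiniDFSCluster;
    import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;

    public class MiniClusterSetupSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // One "default" queue owning 100% of the cluster capacity.
        conf.set(CapacitySchedulerConfiguration.PREFIX
            + CapacitySchedulerConfiguration.ROOT + "."
            + CapacitySchedulerConfiguration.QUEUES, "default");
        conf.set(CapacitySchedulerConfiguration.PREFIX
            + CapacitySchedulerConfiguration.ROOT + ".default."
            + CapacitySchedulerConfiguration.CAPACITY, "100");
        // Builder API replacing the removed MiniDFSCluster(conf, 2, true, null).
        MiniDFSCluster dfscluster = new MiniDFSCluster.Builder(conf)
            .numDataNodes(2).format(true).build();
        try {
          FileSystem fs = dfscluster.getFileSystem();
          System.out.println("MiniDFSCluster up at " + fs.getUri());
        } finally {
          dfscluster.shutdown();
        }
      }
    }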

Modified: hadoop/common/branches/MR-2454/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2454/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java?rev=1414750&r1=1414749&r2=1414750&view=diff
==============================================================================
--- hadoop/common/branches/MR-2454/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java (original)
+++ hadoop/common/branches/MR-2454/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java Wed Nov 28 15:17:21 2012
@@ -43,21 +43,19 @@ import org.apache.hadoop.fs.FSDataOutput
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.tools.DistCpV1;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.Level;
-import org.junit.Ignore;
 
 
 /**
  * A JUnit test for copying files recursively.
  */
-@Ignore
 public class TestCopyFiles extends TestCase {
   {
     ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hdfs.StateChange")
@@ -738,20 +736,22 @@ public class TestCopyFiles extends TestC
   public void testMapCount() throws Exception {
     String namenode = null;
     MiniDFSCluster dfs = null;
-    MiniMRCluster mr = null;
+    MiniDFSCluster mr = null;
     try {
       Configuration conf = new Configuration();
-      dfs = new MiniDFSCluster(conf, 3, true, null);
+      dfs = new MiniDFSCluster.Builder(conf).numDataNodes(3).format(true)
+          .build();
       FileSystem fs = dfs.getFileSystem();
       final FsShell shell = new FsShell(conf);
       namenode = fs.getUri().toString();
-      mr = new MiniMRCluster(3, namenode, 1);
       MyFile[] files = createFiles(fs.getUri(), "/srcdat");
       long totsize = 0;
       for (MyFile f : files) {
         totsize += f.getSize();
       }
-      Configuration job = mr.createJobConf();
+      Configuration job = new JobConf(conf);
       job.setLong("distcp.bytes.per.map", totsize / 3);
       ToolRunner.run(new DistCpV1(job),
           new String[] {"-m", "100",
@@ -766,8 +766,7 @@ public class TestCopyFiles extends TestC
       System.out.println(execCmd(shell, "-lsr", logdir));
       FileStatus[] logs = fs.listStatus(new Path(logdir));
-      // rare case where splits are exact, logs.length can be 4
-      assertTrue("Unexpected map count, logs.length=" + logs.length,
-          logs.length == 5 || logs.length == 4);
+      assertTrue("Unexpected map count, logs.length=" + logs.length,
+          logs.length == 2);
 
       deldir(fs, "/destdat");
       deldir(fs, "/logs");
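
The map-count arithmetic behind the rewritten assertion, as a standalone
sketch. Modeling DistCpV1 as assigning each map roughly
distcp.bytes.per.map bytes (so the task count is a ceiling division) is an
assumption here, not something this diff states; the class name and byte
counts are illustrative.

    public class MapCountSketch {
      public static void main(String[] args) {
        long totsize = 3L * 1024;        // assumed total bytes across source files
        long bytesPerMap = totsize / 3;  // the value the test writes into the job conf
        // ceil(totsize / bytesPerMap): maps needed if each takes ~bytesPerMap bytes
        long expectedMaps = (totsize + bytesPerMap - 1) / bytesPerMap;
        System.out.println("expected maps = " + expectedMaps);  // prints 3
      }
    }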

Modified: hadoop/common/branches/MR-2454/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2454/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java?rev=1414750&r1=1414749&r2=1414750&view=diff
==============================================================================
--- hadoop/common/branches/MR-2454/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java (original)
+++ hadoop/common/branches/MR-2454/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java Wed Nov 28 15:17:21 2012
@@ -22,8 +22,6 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.util.Arrays;
-import java.util.LinkedList;
-import java.util.List;
 import java.util.Random;
 
 import org.apache.commons.logging.LogFactory;
@@ -39,10 +37,10 @@ import org.apache.hadoop.hdfs.MiniDFSClu
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hadoop.mapred.MiniMRClientClusterFactory;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;
 import org.apache.log4j.Level;
-import org.junit.Ignore;
-@Ignore
+
 public class TestDistCh extends junit.framework.TestCase {
   {
     ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hdfs.StateChange")
@@ -52,7 +50,8 @@ public class TestDistCh extends junit.fr
   }
 
   static final Long RANDOM_NUMBER_GENERATOR_SEED = null;
-
+  static final FsPermission UMASK = FsPermission.createImmutable((short)0111);
+
   private static final Random RANDOM = new Random();
   static {
     final long seed = RANDOM_NUMBER_GENERATOR_SEED == null?
@@ -65,7 +64,7 @@ public class TestDistCh extends junit.fr
     new Path(System.getProperty("test.build.data","/tmp")
         ).toString().replace(' ', '+');
 
-  static final int NUN_SUBS = 5;
+  static final int NUN_SUBS = 7;
 
   static class FileTree {
     private final FileSystem fs;
@@ -127,9 +126,12 @@ public class TestDistCh extends junit.fr
   
   public void testDistCh() throws Exception {
     final Configuration conf = new Configuration();
-    final MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
+
+    conf.set(CapacitySchedulerConfiguration.PREFIX
+        + CapacitySchedulerConfiguration.ROOT + "."
+        + CapacitySchedulerConfiguration.QUEUES, "default");
+    conf.set(CapacitySchedulerConfiguration.PREFIX
+        + CapacitySchedulerConfiguration.ROOT + ".default."
+        + CapacitySchedulerConfiguration.CAPACITY, "100");
+    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
+        .numDataNodes(2).format(true).build();
+
     final FileSystem fs = cluster.getFileSystem();
-    final MiniMRCluster mr = new MiniMRCluster(2, fs.getUri().toString(), 1);
     final FsShell shell = new FsShell(conf);
     
     try {
@@ -138,37 +140,36 @@ public class TestDistCh extends junit.fr
 
       runLsr(shell, tree.root, 0);
 
-      //generate random arguments
-      final String[] args = new String[RANDOM.nextInt(NUN_SUBS-1) + 1];
+      final String[] args = new String[NUN_SUBS];
       final PermissionStatus[] newstatus = new PermissionStatus[NUN_SUBS];
-      final List<Integer> indices = new LinkedList<Integer>();
-      for(int i = 0; i < NUN_SUBS; i++) {
-        indices.add(i);
-      }
-      for(int i = 0; i < args.length; i++) {
-        final int index = indices.remove(RANDOM.nextInt(indices.size()));
-        final String sub = "sub" + index;
-        final boolean changeOwner = RANDOM.nextBoolean();
-        final boolean changeGroup = RANDOM.nextBoolean();
-        final boolean changeMode = !changeOwner && !changeGroup? true: RANDOM.nextBoolean();
-        
-        final String owner = changeOwner? sub: "";
-        final String group = changeGroup? sub: "";
-        final String permission = changeMode? RANDOM.nextInt(8) + "" + RANDOM.nextInt(8) + "" + RANDOM.nextInt(8): "";
 
-        args[i] = tree.root + "/" + sub + ":" + owner + ":" + group + ":" + permission;
-        newstatus[index] = new ChPermissionStatus(rootstatus, owner, group, permission);
-      }
-      for(int i = 0; i < NUN_SUBS; i++) {
-        if (newstatus[i] == null) {
-          newstatus[i] = new ChPermissionStatus(rootstatus);
-        }
-      }
+      args[0] = "/test/testDistCh/sub0:sub1::";
+      newstatus[0] = new ChPermissionStatus(rootstatus, "sub1", "", "");
+
+      args[1] = "/test/testDistCh/sub1::sub2:";
+      newstatus[1] = new ChPermissionStatus(rootstatus, "", "sub2", "");
+
+      args[2] = "/test/testDistCh/sub2:::437";
+      newstatus[2] = new ChPermissionStatus(rootstatus, "", "", "437");
+
+      args[3] = "/test/testDistCh/sub3:sub1:sub2:447";
+      newstatus[3] = new ChPermissionStatus(rootstatus, "sub1", "sub2", "447");
+
+      args[4] = "/test/testDistCh/sub4::sub5:437";
+      newstatus[4] = new ChPermissionStatus(rootstatus, "", "sub5", "437");
+
+      args[5] = "/test/testDistCh/sub5:sub1:sub5:";
+      newstatus[5] = new ChPermissionStatus(rootstatus, "sub1", "sub5", "");
+
+      args[6] = "/test/testDistCh/sub6:sub3::437";
+      newstatus[6] = new ChPermissionStatus(rootstatus, "sub3", "", "437");
+
       System.out.println("args=" + Arrays.asList(args).toString().replace(",", ",\n  "));
       System.out.println("newstatus=" + Arrays.asList(newstatus).toString().replace(",", ",\n  "));
 
       //run DistCh
-      new DistCh(mr.createJobConf()).run(args);
+      new DistCh(MiniMRClientClusterFactory.create(this.getClass(), 2, conf)
+          .getConfig()).run(args);
       runLsr(shell, tree.root, 0);
 
       //check results
@@ -184,7 +185,7 @@ public class TestDistCh extends junit.fr
     }
   }
 
-  static final FsPermission UMASK = FsPermission.createImmutable((short)0111);
 
   static void checkFileStatus(PermissionStatus expected, FileStatus actual) {
     assertEquals(expected.getUserName(), actual.getOwner());
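
Each hard-coded argument above follows DistCh's path:owner:group:mode
format, where an empty field means "leave that attribute unchanged". A
small illustrative parse of that convention (DistCh has its own parser;
this sketch and its class name are assumptions):

    public class ChSpecSketch {
      public static void main(String[] args) {
        // From the test above: keep the owner, set group to sub5, set mode to 437.
        String spec = "/test/testDistCh/sub4::sub5:437";
        String[] parts = spec.split(":", 4);  // path, owner, group, octal mode
        System.out.println("path  = " + parts[0]);
        System.out.println("owner = " + (parts[1].isEmpty() ? "(unchanged)" : parts[1]));
        System.out.println("group = " + (parts[2].isEmpty() ? "(unchanged)" : parts[2]));
        System.out.println("mode  = " + (parts[3].isEmpty() ? "(unchanged)" : parts[3]));
      }
    }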

Modified: hadoop/common/branches/MR-2454/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2454/pom.xml?rev=1414750&r1=1414749&r2=1414750&view=diff
==============================================================================
--- hadoop/common/branches/MR-2454/pom.xml (original)
+++ hadoop/common/branches/MR-2454/pom.xml Wed Nov 28 15:17:21 2012
@@ -517,7 +517,7 @@ xsi:schemaLocation="http://maven.apache.
             <groupId>com.atlassian.maven.plugins</groupId>
             <artifactId>maven-clover2-plugin</artifactId>
             <configuration>
-              <includesAllSourceRoots>true</includesAllSourceRoots>
+              <includesAllSourceRoots>false</includesAllSourceRoots>
               <includesTestSourceRoots>true</includesTestSourceRoots>
               <licenseLocation>${cloverLicenseLocation}</licenseLocation>
               <cloverDatabase>${cloverDatabase}</cloverDatabase>