Posted to common-commits@hadoop.apache.org by jb...@apache.org on 2021/02/26 20:59:41 UTC

[hadoop] branch branch-3.2 updated: MAPREDUCE-7320. organize test directories for ClusterMapReduceTestCase (#2722). Contributed by Ahmed Hussein

This is an automated email from the ASF dual-hosted git repository.

jbrennan pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
     new e4c638e  MAPREDUCE-7320. organize test directories for ClusterMapReduceTestCase (#2722). Contributed by Ahmed Hussein
e4c638e is described below

commit e4c638eb21c3657369bc81c1e8cdd7b319903500
Author: Ahmed Hussein <50...@users.noreply.github.com>
AuthorDate: Fri Feb 26 13:42:33 2021 -0600

    MAPREDUCE-7320. organize test directories for ClusterMapReduceTestCase (#2722). Contributed by Ahmed Hussein
    
    (cherry picked from commit e04bcb3a061f7d89fb1353cd5ef6f550c049f36c)
---
 .../org/apache/hadoop/test/GenericTestUtils.java   | 16 ++++++
 .../java/org/apache/hadoop/util/JarFinder.java     |  8 ++-
 .../hadoop/mapred/ClusterMapReduceTestCase.java    | 62 ++++++++--------------
 .../hadoop/mapred/MiniMRClientClusterFactory.java  |  5 +-
 .../org/apache/hadoop/mapred/TestBadRecords.java   |  9 +++-
 .../mapred/TestClusterMapReduceTestCase.java       |  9 +++-
 .../java/org/apache/hadoop/mapred/TestJobName.java |  7 +++
 .../apache/hadoop/mapred/TestMRCJCJobClient.java   |  9 +++-
 .../apache/hadoop/mapreduce/TestMRJobClient.java   |  6 +++
 .../security/ssl/TestEncryptedShuffle.java         | 30 +++++------
 .../hadoop/mapreduce/v2/MiniMRYarnCluster.java     |  5 ++
 .../hadoop/streaming/TestStreamingBadRecords.java  |  9 +++-
 .../apache/hadoop/yarn/server/MiniYARNCluster.java | 10 +++-
 13 files changed, 118 insertions(+), 67 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
index 79fc008..e6ca6ab 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
@@ -230,6 +230,22 @@ public abstract class GenericTestUtils {
   }
 
   /**
+   * Creates a directory for the data/logs of the unit test.
+   * It first deletes the directory if it exists.
+   *
+   * @param testClass the unit test class.
+   * @return the Path of the root directory.
+   */
+  public static File setupTestRootDir(Class<?> testClass) {
+    File testRootDir = getTestDir(testClass.getSimpleName());
+    if (testRootDir.exists()) {
+      FileUtil.fullyDelete(testRootDir);
+    }
+    testRootDir.mkdirs();
+    return testRootDir;
+  }
+
+  /**
    * Get the (created) base directory for tests.
    * @return the absolute directory
    */
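
For orientation, a minimal sketch (not part of this commit) of how a test might use the new setupTestRootDir helper; the TestRootDirExample class name and the trivial assertion are hypothetical:

    import java.io.File;

    import org.apache.hadoop.test.GenericTestUtils;
    import org.junit.Assert;
    import org.junit.BeforeClass;
    import org.junit.Test;

    public class TestRootDirExample {
      private static File testRootDir;

      @BeforeClass
      public static void setupClass() {
        // Deletes and recreates <shared test dir>/TestRootDirExample before any
        // test runs, then hands back the File so tests can put data/logs under it.
        testRootDir = GenericTestUtils.setupTestRootDir(TestRootDirExample.class);
      }

      @Test
      public void testRootDirExists() {
        Assert.assertTrue(testRootDir.isDirectory());
      }
    }
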
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java
index 478a29b..d4b1b92 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java
@@ -132,6 +132,10 @@ public class JarFinder {
    * @return path to the Jar containing the class.
    */
   public static String getJar(Class klass) {
+    return getJar(klass, null);
+  }
+
+  public static String getJar(Class klass, String testSubDir) {
     Preconditions.checkNotNull(klass, "klass");
     ClassLoader loader = klass.getClassLoader();
     if (loader != null) {
@@ -154,7 +158,9 @@ public class JarFinder {
             klassName = klassName.replace(".", "/") + ".class";
             path = path.substring(0, path.length() - klassName.length());
             File baseDir = new File(path);
-            File testDir = GenericTestUtils.getTestDir();
+            File testDir =
+                testSubDir == null ? GenericTestUtils.getTestDir()
+                    : GenericTestUtils.getTestDir(testSubDir);
             testDir = testDir.getAbsoluteFile();
             if (!testDir.exists()) {
               testDir.mkdirs();
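
Likewise, a hypothetical caller of the new two-argument getJar overload, which keeps any jar JarFinder has to build under a per-test subdirectory instead of the root of the shared test dir (JarFinderExample is an assumed name):

    import org.apache.hadoop.util.JarFinder;

    public class JarFinderExample {
      public static void main(String[] args) {
        // If no existing jar contains this class, the generated jar is written
        // under the "JarFinderExample" subdirectory of the shared test dir.
        String jar = JarFinder.getJar(JarFinderExample.class, "JarFinderExample");
        System.out.println(jar);
      }
    }
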
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
index 8d33b15..f16b8a0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
@@ -20,9 +20,12 @@ package org.apache.hadoop.mapred;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.test.GenericTestUtils;
+
 import org.junit.After;
 import org.junit.Before;
 
+import java.io.File;
 import java.io.IOException;
 import java.util.Map;
 import java.util.Properties;
@@ -43,8 +46,18 @@ import java.util.Properties;
  * The DFS filesystem is formated before the testcase starts and after it ends.
  */
 public abstract class ClusterMapReduceTestCase {
+  private static File testRootDir;
+  private static File dfsFolder;
+
   private MiniDFSCluster dfsCluster = null;
-  private MiniMRCluster mrCluster = null;
+  private MiniMRClientCluster mrCluster = null;
+
+  protected static void setupClassBase(Class<?> testClass) throws Exception {
+    // setup the test root directory
+    testRootDir = GenericTestUtils.setupTestRootDir(testClass);
+    dfsFolder = new File(testRootDir, "dfs");
+  }
+
 
   /**
    * Creates Hadoop Cluster and DFS before a test case is run.
@@ -78,37 +91,10 @@ public abstract class ClusterMapReduceTestCase {
           conf.set((String) entry.getKey(), (String) entry.getValue());
         }
       }
-      dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
-      .format(reformatDFS).racks(null).build();
-
-      ConfigurableMiniMRCluster.setConfiguration(props);
-      //noinspection deprecation
-      mrCluster = new ConfigurableMiniMRCluster(2,
-          getFileSystem().getUri().toString(), 1, conf);
-    }
-  }
-
-  private static class ConfigurableMiniMRCluster extends MiniMRCluster {
-    private static Properties config;
-
-    public static void setConfiguration(Properties props) {
-      config = props;
-    }
-
-    public ConfigurableMiniMRCluster(int numTaskTrackers, String namenode,
-                                     int numDir, JobConf conf)
-        throws Exception {
-      super(0,0, numTaskTrackers, namenode, numDir, null, null, null, conf);
-    }
-
-    public JobConf createJobConf() {
-      JobConf conf = super.createJobConf();
-      if (config != null) {
-        for (Map.Entry entry : config.entrySet()) {
-          conf.set((String) entry.getKey(), (String) entry.getValue());
-        }
-      }
-      return conf;
+      dfsCluster =
+          new MiniDFSCluster.Builder(conf, dfsFolder)
+              .numDataNodes(2).format(reformatDFS).racks(null).build();
+      mrCluster = MiniMRClientClusterFactory.create(this.getClass(), 2, conf);
     }
   }
 
@@ -125,7 +111,7 @@ public abstract class ClusterMapReduceTestCase {
    */
   protected void stopCluster() throws Exception {
     if (mrCluster != null) {
-      mrCluster.shutdown();
+      mrCluster.stop();
       mrCluster = null;
     }
     if (dfsCluster != null) {
@@ -157,17 +143,13 @@ public abstract class ClusterMapReduceTestCase {
     return dfsCluster.getFileSystem();
   }
 
-  protected MiniMRCluster getMRCluster() {
-    return mrCluster;
-  }
-
   /**
    * Returns the path to the root directory for the testcase.
    *
    * @return path to the root directory for the testcase.
    */
   protected Path getTestRootDir() {
-    return new Path("x").getParent();
+    return new Path(testRootDir.getPath());
   }
 
   /**
@@ -194,8 +176,8 @@ public abstract class ClusterMapReduceTestCase {
    *
    * @return configuration that works on the testcase Hadoop instance
    */
-  protected JobConf createJobConf() {
-    return mrCluster.createJobConf();
+  protected JobConf createJobConf() throws IOException {
+    return new JobConf(mrCluster.getConfig());
   }
 
 }
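
Putting the base-class changes together, a subclass would now be wired roughly like this (TestExampleJob and its assertions are illustrative only, assuming JUnit 4 and the updated ClusterMapReduceTestCase above):

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
    import org.apache.hadoop.mapred.JobConf;
    import org.junit.Assert;
    import org.junit.BeforeClass;
    import org.junit.Test;

    public class TestExampleJob extends ClusterMapReduceTestCase {

      @BeforeClass
      public static void setupClass() throws Exception {
        // Each subclass declares its own test root; the base class then keeps
        // the MiniDFSCluster data under <root>/dfs.
        setupClassBase(TestExampleJob.class);
      }

      @Test
      public void testClusterIsUp() throws Exception {
        // createJobConf() is now backed by the MiniMRClientCluster configuration
        // instead of the removed ConfigurableMiniMRCluster.
        JobConf conf = createJobConf();
        Assert.assertNotNull(conf);
        Assert.assertTrue(getFileSystem().mkdirs(new Path(getInputDir(), "scratch")));
      }
    }
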
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRClientClusterFactory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRClientClusterFactory.java
index 85c534b..33b85b9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRClientClusterFactory.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRClientClusterFactory.java
@@ -55,7 +55,8 @@ public class MiniMRClientClusterFactory {
     Path appJar = new Path(testRootDir, "MRAppJar.jar");
 
     // Copy MRAppJar and make it private.
-    Path appMasterJar = new Path(MiniMRYarnCluster.APPJAR);
+    Path appMasterJar =
+        new Path(MiniMRYarnCluster.copyAppJarIntoTestDir(identifier));
 
     fs.copyFromLocalFile(appMasterJar, appJar);
     fs.setPermission(appJar, new FsPermission("744"));
@@ -64,7 +65,7 @@ public class MiniMRClientClusterFactory {
 
     job.addFileToClassPath(appJar);
 
-    Path callerJar = new Path(JarFinder.getJar(caller));
+    Path callerJar = new Path(JarFinder.getJar(caller, identifier));
     Path remoteCallerJar = new Path(testRootDir, callerJar.getName());
     fs.copyFromLocalFile(callerJar, remoteCallerJar);
     fs.setPermission(remoteCallerJar, new FsPermission("744"));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
index b45a2a6..1b39583 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
@@ -37,6 +37,8 @@ import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.ReflectionUtils;
+
+import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -58,7 +60,12 @@ public class TestBadRecords extends ClusterMapReduceTestCase {
     Arrays.asList("hello08","hello10");
   
   private List<String> input;
-  
+
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestBadRecords.class);
+  }
+
   public TestBadRecords() {
     input = new ArrayList<String>();
     for(int i=1;i<=10;i++) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
index f04fbd7..b4e8de2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
@@ -29,6 +29,8 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 import static org.junit.Assert.assertTrue;
@@ -36,6 +38,12 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertFalse;
 public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
+
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestClusterMapReduceTestCase.class);
+  }
+
   public void _testMapReduce(boolean restart) throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));
     Writer wr = new OutputStreamWriter(os);
@@ -88,7 +96,6 @@ public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
       reader.close();
       assertEquals(4, counter);
     }
-
   }
 
   @Test
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
index 2659a14..f50089a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
@@ -29,12 +29,19 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
+
+import org.junit.BeforeClass;
 import org.junit.Test;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 
 public class TestJobName extends ClusterMapReduceTestCase {
 
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestJobName.class);
+  }
+
   @Test
   public void testComplexName() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(),
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java
index 9a2af0c..9a2c744 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java
@@ -29,10 +29,17 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.TestMRJobClient;
 import org.apache.hadoop.mapreduce.tools.CLI;
 import org.apache.hadoop.util.Tool;
+
+import org.junit.BeforeClass;
 import org.junit.Ignore;
 @Ignore
 public class TestMRCJCJobClient extends TestMRJobClient {
-  
+
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestMRCJCJobClient.class);
+  }
+
   private String runJob() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(),
                         "text.txt"));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
index f4ccc56..31b90aa 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -63,6 +64,11 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
   private static final Logger LOG =
       LoggerFactory.getLogger(TestMRJobClient.class);
 
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestMRJobClient.class);
+  }
+
   private Job runJob(Configuration conf) throws Exception {
     String input = "hello1\nhello2\nhello3\n";
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/ssl/TestEncryptedShuffle.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/ssl/TestEncryptedShuffle.java
index d870d25..ed80f65 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/ssl/TestEncryptedShuffle.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/ssl/TestEncryptedShuffle.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.mapreduce.security.ssl;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -31,58 +30,55 @@ import org.apache.hadoop.mapred.RunningJob;
 
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.After;
-import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.Assert;
 
-import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
-import java.net.URL;
 
 public class TestEncryptedShuffle {
 
-  private static final String BASEDIR =
-    System.getProperty("test.build.dir", "target/test-dir") + "/" +
-    TestEncryptedShuffle.class.getSimpleName();
-  
-  private String classpathDir;
+  private static File testRootDir;
 
   @BeforeClass
   public static void setUp() throws Exception {
-    File base = new File(BASEDIR);
-    FileUtil.fullyDelete(base);
-    base.mkdirs();
+    testRootDir =
+        GenericTestUtils.setupTestRootDir(TestEncryptedShuffle.class);
   }
 
   @Before
   public void createCustomYarnClasspath() throws Exception {
     classpathDir = KeyStoreTestUtil.getClasspathDir(TestEncryptedShuffle.class);
     new File(classpathDir, "core-site.xml").delete();
+    dfsFolder = new File(testRootDir, String.format("dfs-%d",
+        Time.monotonicNow()));
   }
 
   @After
   public void cleanUpMiniClusterSpecialConfig() throws Exception {
     new File(classpathDir, "core-site.xml").delete();
-    String keystoresDir = new File(BASEDIR).getAbsolutePath();
+    String keystoresDir = testRootDir.getAbsolutePath();
     KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, classpathDir);
   }
 
+  private String classpathDir;
   private MiniDFSCluster dfsCluster = null;
   private MiniMRClientCluster mrCluster = null;
+  private File dfsFolder;
 
   private void startCluster(Configuration  conf) throws Exception {
     if (System.getProperty("hadoop.log.dir") == null) {
-      System.setProperty("hadoop.log.dir", "target/test-dir");
+      System.setProperty("hadoop.log.dir", testRootDir.getAbsolutePath());
     }
     conf.set("dfs.block.access.token.enable", "false");
     conf.set("dfs.permissions", "true");
@@ -92,7 +88,7 @@ public class TestEncryptedShuffle {
             YarnConfiguration.DEFAULT_YARN_CROSS_PLATFORM_APPLICATION_CLASSPATH))
         + File.pathSeparator + classpathDir;
     conf.set(YarnConfiguration.YARN_APPLICATION_CLASSPATH, cp);
-    dfsCluster = new MiniDFSCluster.Builder(conf).build();
+    dfsCluster = new MiniDFSCluster.Builder(conf, dfsFolder).build();
     FileSystem fileSystem = dfsCluster.getFileSystem();
     fileSystem.mkdirs(new Path("/tmp"));
     fileSystem.mkdirs(new Path("/user"));
@@ -129,7 +125,7 @@ public class TestEncryptedShuffle {
     throws Exception {
     try {
       Configuration conf = new Configuration();
-      String keystoresDir = new File(BASEDIR).getAbsolutePath();
+      String keystoresDir = testRootDir.getAbsolutePath();
       String sslConfsDir =
         KeyStoreTestUtil.getClasspathDir(TestEncryptedShuffle.class);
       KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfsDir, conf,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
index 289c17e..886c78c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
@@ -74,6 +74,7 @@ public class MiniMRYarnCluster extends MiniYARNCluster {
     this(testName, 1);
   }
 
+  @SuppressWarnings("deprecation")
   public MiniMRYarnCluster(String testName, int noOfNMs) {
     this(testName, noOfNMs, false);
   }
@@ -84,6 +85,10 @@ public class MiniMRYarnCluster extends MiniYARNCluster {
     addService(historyServerWrapper);
   }
 
+  public static String copyAppJarIntoTestDir(String testSubdir) {
+    return JarFinder.getJar(LocalContainerLauncher.class, testSubdir);
+  }
+
   public static String getResolvedMRHistoryWebAppURLWithoutScheme(
       Configuration conf, boolean isSSLEnabled) {
     InetSocketAddress address = null;
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
index 0ef1ff0..5a4e3a9 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
@@ -31,13 +31,13 @@ import java.util.List;
 import java.util.Properties;
 import java.util.StringTokenizer;
 
+import org.junit.BeforeClass;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
 import org.apache.hadoop.mapred.Counters;
-import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.SkipBadRecords;
 import org.apache.hadoop.mapred.Utils;
@@ -65,7 +65,12 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase
   private static final String badReducer = 
     UtilTest.makeJavaCommand(BadApp.class, new String[]{"true"});
   private static final int INPUTSIZE=100;
-  
+
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestStreamingBadRecords.class);
+  }
+
   public TestStreamingBadRecords() throws IOException
   {
     UtilTest utilTest = new UtilTest(getClass().getName());
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
index fa69f18..1aedb18 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
@@ -41,8 +41,10 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.net.ServerSocketUtil;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.service.CompositeService;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
+import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.conf.HAUtil;
@@ -172,8 +174,11 @@ public class MiniYARNCluster extends CompositeService {
     this.numLocalDirs = numLocalDirs;
     this.numLogDirs = numLogDirs;
     this.enableAHS = enableAHS;
-    String testSubDir = testName.replace("$", "");
-    File targetWorkDir = new File("target", testSubDir);
+    String yarnFolderName = String.format("yarn-%d", Time.monotonicNow());
+    File targetWorkDirRoot = GenericTestUtils.getTestDir(getName());
+    // make sure that the folder exists
+    targetWorkDirRoot.mkdirs();
+    File targetWorkDir = new File(targetWorkDirRoot, yarnFolderName);
     try {
       FileContext.getLocalFSFileContext().delete(
           new Path(targetWorkDir.getAbsolutePath()), true);
@@ -228,6 +233,7 @@ public class MiniYARNCluster extends CompositeService {
    * @param numLocalDirs the number of nm-local-dirs per nodemanager
    * @param numLogDirs the number of nm-log-dirs per nodemanager
    */
+  @SuppressWarnings("deprecation")
   public MiniYARNCluster(
       String testName, int numResourceManagers, int numNodeManagers,
       int numLocalDirs, int numLogDirs) {
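
For illustration, a hypothetical standalone driver showing where the YARN work directories now land; the class name and the exact base path are assumptions (the base comes from GenericTestUtils.getTestDir):

    import org.apache.hadoop.yarn.conf.YarnConfiguration;
    import org.apache.hadoop.yarn.server.MiniYARNCluster;

    public class MiniYarnDirExample {
      public static void main(String[] args) throws Exception {
        // With this change the node-manager local/log dirs are created under the
        // shared test dir, roughly <test dir>/MiniYarnDirExample/yarn-<timestamp>/,
        // instead of the previous target/<testName> location.
        MiniYARNCluster cluster = new MiniYARNCluster("MiniYarnDirExample", 1, 1, 1);
        cluster.init(new YarnConfiguration());
        cluster.start();
        cluster.stop();
      }
    }
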


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org