Posted to common-commits@hadoop.apache.org by cn...@apache.org on 2016/10/26 15:39:14 UTC

[2/3] hadoop git commit: HADOOP-13614. Purge some superfluous/obsolete S3 FS tests that are slowing test runs down. Contributed by Steve Loughran.

HADOOP-13614. Purge some superfluous/obsolete S3 FS tests that are slowing test runs down. Contributed by Steve Loughran.

(cherry picked from commit 9cad3e235026dbe4658705ca85d263d0edf14521)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/67e01f72
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/67e01f72
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/67e01f72

Branch: refs/heads/branch-2
Commit: 67e01f7218e592d7b18316d65f7b22ae8b9ad7a6
Parents: 0cd43dd
Author: Chris Nauroth <cn...@apache.org>
Authored: Wed Oct 26 08:27:26 2016 -0700
Committer: Chris Nauroth <cn...@apache.org>
Committed: Wed Oct 26 08:27:33 2016 -0700

----------------------------------------------------------------------
 .../hadoop/fs/contract/ContractTestUtils.java   |   6 +
 .../TestFSMainOperationsLocalFileSystem.java    |   4 +-
 hadoop-tools/hadoop-aws/pom.xml                 |   7 -
 .../fs/contract/s3a/ITestS3AContractDistCp.java |   6 +
 .../hadoop/fs/contract/s3a/S3AContract.java     |   6 +-
 .../hadoop/fs/s3a/AbstractS3ATestBase.java      |  26 +++-
 .../fs/s3a/ITestS3ABlockingThreadPool.java      |  82 -----------
 .../apache/hadoop/fs/s3a/ITestS3ABlocksize.java |  19 +--
 .../hadoop/fs/s3a/ITestS3AConfiguration.java    |  23 +--
 .../hadoop/fs/s3a/ITestS3AEncryption.java       |   9 +-
 .../ITestS3AEncryptionAlgorithmPropagation.java |   7 -
 .../hadoop/fs/s3a/ITestS3AFailureHandling.java  |  11 +-
 .../fs/s3a/ITestS3AFileOperationCost.java       |  19 +--
 .../fs/s3a/ITestS3AFileSystemContract.java      |  33 ++++-
 .../fs/s3a/ITestS3ATemporaryCredentials.java    |  14 +-
 .../apache/hadoop/fs/s3a/S3ATestConstants.java  |  27 +++-
 .../org/apache/hadoop/fs/s3a/S3ATestUtils.java  |  16 ++-
 .../fs/s3a/scale/AbstractSTestS3AHugeFiles.java |  25 ++--
 .../s3a/scale/ITestS3ADeleteFilesOneByOne.java  |  12 +-
 .../fs/s3a/scale/ITestS3ADeleteManyFiles.java   |  13 +-
 .../s3a/scale/ITestS3ADirectoryPerformance.java |  16 ++-
 .../scale/ITestS3AHugeFilesClassicOutput.java   |   4 +-
 .../scale/ITestS3AInputStreamPerformance.java   |   3 +-
 .../hadoop/fs/s3a/scale/S3AScaleTestBase.java   | 139 +++++++++----------
 .../org/apache/hadoop/fs/s3a/yarn/ITestS3A.java |   4 +-
 .../fs/s3a/yarn/ITestS3AMiniYarnCluster.java    |  50 +++----
 26 files changed, 255 insertions(+), 326 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
index 73c8f1c..f6b6389 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
@@ -834,6 +834,7 @@ public class ContractTestUtils extends Assert {
 
     long totalBytesRead = 0;
     int nextExpectedNumber = 0;
+    NanoTimer timer = new NanoTimer();
     try (InputStream inputStream = fs.open(path)) {
       while (true) {
         final int bytesRead = inputStream.read(testBuffer);
@@ -862,6 +863,8 @@ public class ContractTestUtils extends Assert {
             " bytes but only received " + totalBytesRead);
       }
     }
+    timer.end("Time to read %d bytes", expectedSize);
+    bandwidth(timer, expectedSize);
   }
 
   /**
@@ -925,9 +928,12 @@ public class ContractTestUtils extends Assert {
     final Path objectPath = new Path(parent, objectName);
 
     // Write test file in a specific pattern
+    NanoTimer timer = new NanoTimer();
     assertEquals(fileSize,
         generateTestFile(fs, objectPath, fileSize, testBufferSize, modulus));
     assertPathExists(fs, "not created successful", objectPath);
+    timer.end("Time to write %d bytes", fileSize);
+    bandwidth(timer, fileSize);
 
     // Now read the same file back and verify its content
     try {

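The two hunks above wrap the write and the read-back in createAndVerifyFile() with a ContractTestUtils.NanoTimer and log the achieved bandwidth, so slow object-store runs become visible in the test output. The standalone sketch below illustrates the same measure-and-report pattern with plain JDK calls; the class and method names are illustrative and are not part of ContractTestUtils.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

/** Minimal sketch of timing an I/O operation and reporting bandwidth. */
public class TimedWriteSketch {

  static void writeAndReport(OutputStream out, byte[] data) throws IOException {
    long start = System.nanoTime();
    out.write(data);
    out.flush();
    double seconds = (System.nanoTime() - start) / 1.0e9;
    double megabytes = data.length / (1024.0 * 1024.0);
    System.out.printf("wrote %d bytes in %.3fs (%.2f MB/s)%n",
        data.length, seconds, seconds > 0 ? megabytes / seconds : 0.0);
  }

  public static void main(String[] args) throws IOException {
    writeAndReport(new ByteArrayOutputStream(), new byte[8 * 1024 * 1024]);
  }
}
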
http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java
index 6081f38..12687fd 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -46,7 +46,7 @@ public class TestFSMainOperationsLocalFileSystem extends FSMainOperationsBaseTes
     fcTarget = FileSystem.getLocal(conf);
     super.setUp();
   }
-  
+
   @Override
   @After
   public void tearDown() throws Exception {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/pom.xml b/hadoop-tools/hadoop-aws/pom.xml
index 28825d9..1c70638 100644
--- a/hadoop-tools/hadoop-aws/pom.xml
+++ b/hadoop-tools/hadoop-aws/pom.xml
@@ -181,9 +181,6 @@
                   </includes>
                   <excludes>
                     <exclude>**/ITestJets3tNativeS3FileSystemContract.java</exclude>
-                    <exclude>**/ITestS3ABlockingThreadPool.java</exclude>
-                    <exclude>**/ITestS3AFileSystemContract.java</exclude>
-                    <exclude>**/ITestS3AMiniYarnCluster.java</exclude>
                     <exclude>**/ITest*Root*.java</exclude>
                     <exclude>**/ITestS3AFileContextStatistics.java</exclude>
                     <include>**/ITestS3AHuge*.java</include>
@@ -211,10 +208,6 @@
                   <!-- parallel execution. -->
                   <includes>
                     <include>**/ITestJets3tNativeS3FileSystemContract.java</include>
-                    <include>**/ITestS3ABlockingThreadPool.java</include>
-                    <include>**/ITestS3AFastOutputStream.java</include>
-                    <include>**/ITestS3AFileSystemContract.java</include>
-                    <include>**/ITestS3AMiniYarnCluster.java</include>
                     <include>**/ITest*Root*.java</include>
                     <include>**/ITestS3AFileContextStatistics.java</include>
                     <include>**/ITestS3AHuge*.java</include>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java
index 9e14ed2..50ce0c2 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.contract.s3a;
 
 import static org.apache.hadoop.fs.s3a.Constants.*;
+import static org.apache.hadoop.fs.s3a.S3ATestConstants.SCALE_TEST_TIMEOUT_MILLIS;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.tools.contract.AbstractContractDistCpTest;
@@ -33,6 +34,11 @@ public class ITestS3AContractDistCp extends AbstractContractDistCpTest {
   private static final long MULTIPART_SETTING = MULTIPART_MIN_SIZE;
 
   @Override
+  protected int getTestTimeoutMillis() {
+    return SCALE_TEST_TIMEOUT_MILLIS;
+  }
+
+  @Override
   protected Configuration createConfiguration() {
     Configuration newConf = super.createConfiguration();
     newConf.setLong(MULTIPART_SIZE, MULTIPART_SETTING);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/S3AContract.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/S3AContract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/S3AContract.java
index e9024b5..3510a64 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/S3AContract.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/S3AContract.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.fs.contract.s3a;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.contract.AbstractBondedFSContract;
+import org.apache.hadoop.fs.s3a.S3ATestUtils;
 
 /**
  * The contract of S3A: only enabled if the test bucket is provided.
@@ -29,7 +30,6 @@ public class S3AContract extends AbstractBondedFSContract {
 
   public static final String CONTRACT_XML = "contract/s3a.xml";
 
-
   public S3AContract(Configuration conf) {
     super(conf);
     //insert the base features
@@ -43,8 +43,6 @@ public class S3AContract extends AbstractBondedFSContract {
 
   @Override
   public Path getTestPath() {
-    String testUniqueForkId = System.getProperty("test.unique.fork.id");
-    return testUniqueForkId == null ? super.getTestPath() :
-        new Path("/" + testUniqueForkId, "test");
+    return S3ATestUtils.createTestPath(super.getTestPath());
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java
index e049fd1..c19b72c 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java
@@ -26,8 +26,8 @@ import org.apache.hadoop.fs.contract.ContractTestUtils;
 import org.apache.hadoop.fs.contract.s3a.S3AContract;
 import org.apache.hadoop.io.IOUtils;
 import org.junit.Before;
-import org.junit.Rule;
-import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 
@@ -40,6 +40,9 @@ import static org.apache.hadoop.fs.contract.ContractTestUtils.writeDataset;
 public abstract class AbstractS3ATestBase extends AbstractFSContractTestBase
     implements S3ATestConstants {
 
+  protected static final Logger LOG =
+      LoggerFactory.getLogger(AbstractS3ATestBase.class);
+
   @Override
   protected AbstractFSContract createContract(Configuration conf) {
     return new S3AContract(conf);
@@ -52,14 +55,16 @@ public abstract class AbstractS3ATestBase extends AbstractFSContractTestBase
     IOUtils.closeStream(getFileSystem());
   }
 
-  @Rule
-  public TestName methodName = new TestName();
-
   @Before
   public void nameThread() {
     Thread.currentThread().setName("JUnit-" + methodName.getMethodName());
   }
 
+  @Override
+  protected int getTestTimeoutMillis() {
+    return S3A_TEST_TIMEOUT;
+  }
+
   protected Configuration getConfiguration() {
     return getContract().getConf();
   }
@@ -74,6 +79,17 @@ public abstract class AbstractS3ATestBase extends AbstractFSContractTestBase
   }
 
   /**
+   * Describe a test in the logs.
+   * @param text text to print
+   * @param args arguments to format in the printing
+   */
+  protected void describe(String text, Object... args) {
+    LOG.info("\n\n{}: {}\n",
+        methodName.getMethodName(),
+        String.format(text, args));
+  }
+
+  /**
    * Write a file, read it back, validate the dataset. Overwrites the file
    * if it is present
    * @param name filename (will have the test path prepended to it)

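Two things change in AbstractS3ATestBase: the local TestName rule goes away (the parent class evidently supplies the methodName field, since nameThread() and describe() still reference it), and the class now overrides getTestTimeoutMillis() to return S3A_TEST_TIMEOUT. The sketch below shows, assuming plain JUnit 4, how such an overridable timeout is typically wired into a Timeout rule; it is not the actual AbstractFSContractTestBase code, which this diff does not include.

import org.junit.Rule;
import org.junit.rules.TestName;
import org.junit.rules.Timeout;

/** Sketch of a test base class with an overridable per-suite timeout. */
public abstract class TimeoutAwareTestBase {

  /** Override point: subclasses return a larger value for slow suites. */
  protected int getTestTimeoutMillis() {
    return 10 * 60 * 1000;   // ten minutes by default
  }

  // The rule is built when the test instance is constructed, before any
  // @Before method runs, so the override must not rely on setup() state.
  @Rule
  public Timeout testTimeout = new Timeout(getTestTimeoutMillis());

  @Rule
  public TestName methodName = new TestName();
}

Because the rule is created at construction time, overriding getTestTimeoutMillis() is enough to lengthen the timeout, which is exactly what ITestS3AContractDistCp does above with SCALE_TEST_TIMEOUT_MILLIS.
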
http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java
deleted file mode 100644
index 991135e..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3a;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.Timeout;
-
-/**
- * Demonstrate that the threadpool blocks additional client requests if
- * its queue is full (rather than throwing an exception) by initiating an
- * upload consisting of 4 parts with 2 threads and 1 spot in the queue. The
- * 4th part should not trigger an exception as it would with a
- * non-blocking threadpool.
- */
-public class ITestS3ABlockingThreadPool {
-
-  private Configuration conf;
-  private S3AFileSystem fs;
-
-  @Rule
-  public Timeout testTimeout = new Timeout(30 * 60 * 1000);
-
-  protected Path getTestPath() {
-    return new Path("/tests3a");
-  }
-
-  @Before
-  public void setUp() throws Exception {
-    conf = new Configuration();
-    conf.setLong(Constants.MIN_MULTIPART_THRESHOLD, 5 * 1024 * 1024);
-    conf.setLong(Constants.MULTIPART_SIZE, 5 * 1024 * 1024);
-    conf.setInt(Constants.MAX_THREADS, 2);
-    conf.setInt(Constants.MAX_TOTAL_TASKS, 1);
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    if (fs != null) {
-      fs.delete(getTestPath(), true);
-    }
-  }
-
-  @Test
-  public void testRegularMultiPartUpload() throws Exception {
-    fs = S3ATestUtils.createTestFileSystem(conf);
-    ContractTestUtils.createAndVerifyFile(fs, getTestPath(), 16 * 1024 *
-        1024);
-  }
-
-  @Test
-  public void testFastMultiPartUpload() throws Exception {
-    conf.setBoolean(Constants.FAST_UPLOAD, true);
-    conf.set(Constants.FAST_UPLOAD_BUFFER,
-        Constants.FAST_UPLOAD_BYTEBUFFER);
-    fs = S3ATestUtils.createTestFileSystem(conf);
-    ContractTestUtils.createAndVerifyFile(fs, getTestPath(), 16 * 1024 *
-        1024);
-
-  }
-}

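The deleted test's class comment describes the behaviour it exercised: when the upload thread pool's queue is full, further submissions should block rather than fail. For readers unfamiliar with that pattern, here is a generic sketch of a submission wrapper that blocks on a semaphore; it is only an illustration of the idea, not the S3A thread pool implementation.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

/** Sketch: block task submission when the pool and queue are saturated. */
public class BlockingSubmitSketch {

  private final ExecutorService pool = Executors.newFixedThreadPool(2);
  // 2 running + 1 queued; a 4th submission blocks until a permit is released
  private final Semaphore permits = new Semaphore(3);

  public void submit(Runnable task) throws InterruptedException {
    permits.acquire();                 // blocks instead of rejecting
    try {
      pool.execute(() -> {
        try {
          task.run();
        } finally {
          permits.release();
        }
      });
    } catch (RuntimeException e) {
      permits.release();
      throw e;
    }
  }

  public void shutdown() throws InterruptedException {
    pool.shutdown();
    pool.awaitTermination(1, TimeUnit.MINUTES);
  }

  public static void main(String[] args) throws InterruptedException {
    BlockingSubmitSketch s = new BlockingSubmitSketch();
    for (int i = 0; i < 4; i++) {      // the 4th call waits for a free slot
      final int part = i;
      s.submit(() -> System.out.println("uploading part " + part));
    }
    s.shutdown();
  }
}
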
http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java
index 9a6dae7..2f630ab 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,16 +18,11 @@
 
 package org.apache.hadoop.fs.s3a;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.contract.AbstractFSContract;
-import org.apache.hadoop.fs.contract.AbstractFSContractTestBase;
-import org.apache.hadoop.fs.contract.s3a.S3AContract;
-import org.junit.Rule;
+
 import org.junit.Test;
-import org.junit.rules.Timeout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -38,19 +33,11 @@ import static org.apache.hadoop.fs.contract.ContractTestUtils.fileStatsToString;
 /**
  * S3A tests for configuring block size.
  */
-public class ITestS3ABlocksize extends AbstractFSContractTestBase {
+public class ITestS3ABlocksize extends AbstractS3ATestBase {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(ITestS3ABlocksize.class);
 
-  @Override
-  protected AbstractFSContract createContract(Configuration conf) {
-    return new S3AContract(conf);
-  }
-
-  @Rule
-  public Timeout testTimeout = new Timeout(30 * 60 * 1000);
-
   @Test
   @SuppressWarnings("deprecation")
   public void testBlockSize() throws Exception {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
index 04057a9..6ae9613 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
@@ -68,34 +68,37 @@ public class ITestS3AConfiguration {
   private static final Logger LOG =
       LoggerFactory.getLogger(ITestS3AConfiguration.class);
 
-  private static final String TEST_ENDPOINT = "test.fs.s3a.endpoint";
-
   @Rule
-  public Timeout testTimeout = new Timeout(30 * 60 * 1000);
+  public Timeout testTimeout = new Timeout(
+      S3ATestConstants.S3A_TEST_TIMEOUT
+  );
 
   @Rule
   public final TemporaryFolder tempDir = new TemporaryFolder();
 
   /**
    * Test if custom endpoint is picked up.
-   * <p/>
-   * The test expects TEST_ENDPOINT to be defined in the Configuration
+   * <p>
+   * The test expects {@link S3ATestConstants#CONFIGURATION_TEST_ENDPOINT}
+   * to be defined in the Configuration
    * describing the endpoint of the bucket to which TEST_FS_S3A_NAME points
-   * (f.i. "s3-eu-west-1.amazonaws.com" if the bucket is located in Ireland).
+   * (e.g. "s3-eu-west-1.amazonaws.com" if the bucket is located in Ireland).
    * Evidently, the bucket has to be hosted in the region denoted by the
    * endpoint for the test to succeed.
-   * <p/>
+   * <p>
    * More info and the list of endpoint identifiers:
-   * http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
+   * @see <a href="http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region">endpoint list</a>.
    *
    * @throws Exception
    */
   @Test
   public void testEndpoint() throws Exception {
     conf = new Configuration();
-    String endpoint = conf.getTrimmed(TEST_ENDPOINT, "");
+    String endpoint = conf.getTrimmed(
+        S3ATestConstants.CONFIGURATION_TEST_ENDPOINT, "");
     if (endpoint.isEmpty()) {
-      LOG.warn("Custom endpoint test skipped as " + TEST_ENDPOINT + "config " +
+      LOG.warn("Custom endpoint test skipped as " +
+          S3ATestConstants.CONFIGURATION_TEST_ENDPOINT + " config " +
           "setting was not detected");
     } else {
       conf.set(Constants.ENDPOINT, endpoint);

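The endpoint test now reads its endpoint from the CONFIGURATION_TEST_ENDPOINT constant ("test.fs.s3a.endpoint") instead of a private string, and skips itself when the property is absent. For the test to run, the property has to reach the Configuration, as in the example below; the endpoint value is only an example, and in practice the setting usually lives in the developer's test resources rather than in code.

import org.apache.hadoop.conf.Configuration;

public class EndpointConfigExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Point the test at the region hosting the test bucket (example value).
    conf.set("test.fs.s3a.endpoint", "s3-eu-west-1.amazonaws.com");
    System.out.println("test endpoint = "
        + conf.getTrimmed("test.fs.s3a.endpoint", ""));
  }
}
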
http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java
index 4543278..8432789 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryption.java
@@ -22,7 +22,6 @@ import com.amazonaws.services.s3.model.ObjectMetadata;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.apache.hadoop.io.IOUtils;
 import org.junit.Test;
 
 import java.io.IOException;
@@ -48,15 +47,9 @@ public class ITestS3AEncryption extends AbstractS3ATestBase {
   }
 
   private static final int[] SIZES = {
-      0, 1, 2, 3, 4, 5, 254, 255, 256, 257, 2 ^ 10 - 3, 2 ^ 11 - 2, 2 ^ 12 - 1
+      0, 1, 2, 3, 4, 5, 254, 255, 256, 257, 2 ^ 12 - 1
   };
 
-  @Override
-  public void teardown() throws Exception {
-    super.teardown();
-    IOUtils.closeStream(getFileSystem());
-  }
-
   @Test
   public void testEncryption() throws Throwable {
     for (int size: SIZES) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java
index 81578c2..96deb25 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmPropagation.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.fs.s3a;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
 import org.junit.Test;
 
 import java.io.IOException;
@@ -43,12 +42,6 @@ public class ITestS3AEncryptionAlgorithmPropagation
     return conf;
   }
 
-  @Override
-  public void teardown() throws Exception {
-    super.teardown();
-    IOUtils.closeStream(getFileSystem());
-  }
-
   @Test
   public void testEncrypt0() throws Throwable {
     writeThenReadFileToFailure(0);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java
index e284ea7..7cd1094 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java
@@ -18,13 +18,9 @@
 
 package org.apache.hadoop.fs.s3a;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.contract.AbstractFSContract;
-import org.apache.hadoop.fs.contract.AbstractFSContractTestBase;
-import org.apache.hadoop.fs.contract.s3a.S3AContract;
 import org.apache.hadoop.test.LambdaTestUtils;
 
 import org.junit.Test;
@@ -41,15 +37,10 @@ import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
  * Test S3A Failure translation, including a functional test
  * generating errors during stream IO.
  */
-public class ITestS3AFailureHandling extends AbstractFSContractTestBase {
+public class ITestS3AFailureHandling extends AbstractS3ATestBase {
   private static final Logger LOG =
       LoggerFactory.getLogger(ITestS3AFailureHandling.class);
 
-  @Override
-  protected AbstractFSContract createContract(Configuration conf) {
-    return new S3AContract(conf);
-  }
-
   @Test
   public void testReadFileChanged() throws Throwable {
     describe("overwrite a file with a shorter one during a read, seek");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java
index f19ea95..7fb54b1 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java
@@ -18,13 +18,9 @@
 
 package org.apache.hadoop.fs.s3a;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.contract.AbstractFSContract;
-import org.apache.hadoop.fs.contract.AbstractFSContractTestBase;
 import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.apache.hadoop.fs.contract.s3a.S3AContract;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -43,7 +39,7 @@ import static org.apache.hadoop.test.GenericTestUtils.getTestDir;
  * Use metrics to assert about the cost of file status queries.
  * {@link S3AFileSystem#getFileStatus(Path)}.
  */
-public class ITestS3AFileOperationCost extends AbstractFSContractTestBase {
+public class ITestS3AFileOperationCost extends AbstractS3ATestBase {
 
   private MetricDiff metadataRequests;
   private MetricDiff listRequests;
@@ -52,16 +48,6 @@ public class ITestS3AFileOperationCost extends AbstractFSContractTestBase {
       LoggerFactory.getLogger(ITestS3AFileOperationCost.class);
 
   @Override
-  protected AbstractFSContract createContract(Configuration conf) {
-    return new S3AContract(conf);
-  }
-
-  @Override
-  public S3AFileSystem getFileSystem() {
-    return (S3AFileSystem) super.getFileSystem();
-  }
-
-  @Override
   public void setup() throws Exception {
     super.setup();
     S3AFileSystem fs = getFileSystem();
@@ -246,7 +232,8 @@ public class ITestS3AFileOperationCost extends AbstractFSContractTestBase {
 
     int destDirDepth = directoriesInPath(destDir);
     directoriesCreated.assertDiffEquals(state, 1);
-/*  TODO: uncomment once HADOOP-13222 is in
+/*  TODO: uncomment once HADOOP-13222 "s3a.mkdirs() to delete empty fake parent directories"
+    is in
     deleteRequests.assertDiffEquals(state,1);
     directoriesDeleted.assertDiffEquals(state,0);
     fakeDirectoriesDeleted.assertDiffEquals(state,destDirDepth);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java
index 858ac22..0eb601b 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.fs.s3a;
 
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.rules.TestName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -38,18 +41,44 @@ public class ITestS3AFileSystemContract extends FileSystemContractBaseTest {
   protected static final Logger LOG =
       LoggerFactory.getLogger(ITestS3AFileSystemContract.class);
 
+  private Path basePath;
+
+  @Rule
+  public TestName methodName = new TestName();
+
+  @Before
+  public void nameThread() {
+    Thread.currentThread().setName("JUnit-" + methodName.getMethodName());
+  }
+
   @Override
   public void setUp() throws Exception {
     Configuration conf = new Configuration();
 
     fs = S3ATestUtils.createTestFileSystem(conf);
+    basePath = fs.makeQualified(
+        S3ATestUtils.createTestPath(new Path("/s3afilesystemcontract")));
     super.setUp();
   }
 
+  /**
+   * This method explicitly places all absolute paths under the per-test-suite
+   * base directory; this allows the test to run in parallel.
+   * @param pathString path string as input
+   * @return a qualified path.
+   */
+  protected Path path(String pathString) {
+    if (pathString.startsWith("/")) {
+      return fs.makeQualified(new Path(basePath, pathString));
+    } else {
+      return super.path(pathString);
+    }
+  }
+
   @Override
   protected void tearDown() throws Exception {
     if (fs != null) {
-      fs.delete(path("test"), true);
+      fs.delete(basePath, true);
     }
     super.tearDown();
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java
index 360a151..84aad3c 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
@@ -28,9 +28,6 @@ import com.amazonaws.services.securitytoken.model.GetSessionTokenRequest;
 import com.amazonaws.services.securitytoken.model.GetSessionTokenResult;
 import com.amazonaws.services.securitytoken.model.Credentials;
 
-import org.apache.hadoop.fs.contract.AbstractFSContract;
-import org.apache.hadoop.fs.contract.AbstractFSContractTestBase;
-import org.apache.hadoop.fs.contract.s3a.S3AContract;
 import org.apache.hadoop.fs.s3native.S3xLoginHelper;
 import org.apache.hadoop.conf.Configuration;
 
@@ -48,9 +45,7 @@ import static org.apache.hadoop.fs.s3a.Constants.*;
  * should only be used against transient filesystems where you don't care about
  * the data.
  */
-public class ITestS3ATemporaryCredentials extends AbstractFSContractTestBase {
-  public static final String TEST_STS_ENABLED = "test.fs.s3a.sts.enabled";
-  public static final String TEST_STS_ENDPOINT = "test.fs.s3a.sts.endpoint";
+public class ITestS3ATemporaryCredentials extends AbstractS3ATestBase {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(ITestS3ATemporaryCredentials.class);
@@ -60,11 +55,6 @@ public class ITestS3ATemporaryCredentials extends AbstractFSContractTestBase {
 
   private static final long TEST_FILE_SIZE = 1024;
 
-  @Override
-  protected AbstractFSContract createContract(Configuration conf) {
-    return new S3AContract(conf);
-  }
-
   /**
    * Test use of STS for requesting temporary credentials.
    *

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java
index 6894bb0..8c22f47 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java
@@ -135,13 +135,32 @@ public interface S3ATestConstants {
   int DEFAULT_DIRECTORY_COUNT = 2;
 
   /**
-   * Default scale test timeout in seconds: {@value}.
+   * Default policy on scale tests: {@value}.
    */
-  int DEFAULT_TEST_TIMEOUT = 30 * 60;
+  boolean DEFAULT_SCALE_TESTS_ENABLED = false;
 
   /**
-   * Default policy on scale tests: {@value}.
+   * Fork ID passed down from maven if the test is running in parallel.
    */
-  boolean DEFAULT_SCALE_TESTS_ENABLED = false;
+  String TEST_UNIQUE_FORK_ID = "test.unique.fork.id";
+  String TEST_STS_ENABLED = "test.fs.s3a.sts.enabled";
+  String TEST_STS_ENDPOINT = "test.fs.s3a.sts.endpoint";
+
+  /**
+   * Timeout in Milliseconds for standard tests: {@value}.
+   */
+  int S3A_TEST_TIMEOUT = 10 * 60 * 1000;
+
+  /**
+   * Timeout in Seconds for Scale Tests: {@value}.
+   */
+  int SCALE_TEST_TIMEOUT_SECONDS = 30 * 60;
 
+  int SCALE_TEST_TIMEOUT_MILLIS = SCALE_TEST_TIMEOUT_SECONDS * 1000;
+  /**
+   * Optional custom endpoint for S3A configuration tests.
+   * This does <i>not</i> set the endpoint for s3 access elsewhere.
+   */
+  String CONFIGURATION_TEST_ENDPOINT =
+      "test.fs.s3a.endpoint";
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
index 809c6e3..462914c 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.fs.s3a;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.s3a.scale.S3AScaleTestBase;
 import org.junit.Assert;
 import org.junit.internal.AssumptionViolatedException;
@@ -59,7 +60,7 @@ public final class S3ATestUtils {
    */
   public static S3AFileSystem createTestFileSystem(Configuration conf)
       throws IOException {
-    return createTestFileSystem(conf, true);
+    return createTestFileSystem(conf, false);
   }
 
   /**
@@ -303,6 +304,19 @@ public final class S3ATestUtils {
   }
 
   /**
+   * Create a test path, using the value of
+   * {@link S3ATestConstants#TEST_UNIQUE_FORK_ID} if it is set.
+   * @param defVal default value
+   * @return a path
+   */
+  public static Path createTestPath(Path defVal) {
+    String testUniqueForkId = System.getProperty(
+        S3ATestConstants.TEST_UNIQUE_FORK_ID);
+    return testUniqueForkId == null ? defVal :
+        new Path("/" + testUniqueForkId, "test");
+  }
+
+  /**
    * Reset all metrics in a list.
    * @param metrics metrics to reset
    */

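createTestPath() above is now the single place where the per-fork test path is computed; both S3AContract.getTestPath() and the YARN ITestS3A test delegate to it. The standalone rendering below mirrors that logic so the effect is easy to see; the fork id value is just an example of what the build passes down via test.unique.fork.id when tests run in parallel.

import org.apache.hadoop.fs.Path;

public class CreateTestPathExample {

  /** Same logic as S3ATestUtils.createTestPath() in the patch above. */
  static Path createTestPath(Path defVal) {
    String forkId = System.getProperty("test.unique.fork.id");
    return forkId == null ? defVal : new Path("/" + forkId, "test");
  }

  public static void main(String[] args) {
    System.out.println(createTestPath(new Path("/tests3a")));  // /tests3a
    System.setProperty("test.unique.fork.id", "fork-0003");    // example value
    System.out.println(createTestPath(new Path("/tests3a")));  // /fork-0003/test
  }
}
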
http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java
index a60d084..fcb6444 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.StorageStatistics;
 import org.apache.hadoop.fs.contract.ContractTestUtils;
 import org.apache.hadoop.fs.s3a.S3AFileStatus;
+import org.apache.hadoop.fs.s3a.S3AFileSystem;
 import org.apache.hadoop.fs.s3a.Statistic;
 import org.apache.hadoop.util.Progressable;
 
@@ -70,27 +71,22 @@ public abstract class AbstractSTestS3AHugeFiles extends S3AScaleTestBase {
   private int partitionSize;
 
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
-
+  public void setup() throws Exception {
+    super.setup();
     final Path testPath = getTestPath();
     scaleTestDir = new Path(testPath, "scale");
     hugefile = new Path(scaleTestDir, "hugefile");
     hugefileRenamed = new Path(scaleTestDir, "hugefileRenamed");
   }
 
-  @Override
-  public void tearDown() throws Exception {
-    // do nothing. Specifically: do not delete the test dir
-  }
 
   /**
    * Note that this can get called before test setup.
    * @return the configuration to use.
    */
   @Override
-  protected Configuration createConfiguration() {
-    Configuration conf = super.createConfiguration();
+  protected Configuration createScaleConfiguration() {
+    Configuration conf = super.createScaleConfiguration();
     partitionSize = (int)getTestPropertyBytes(conf,
         KEY_HUGE_PARTITION_SIZE,
         DEFAULT_PARTITION_SIZE);
@@ -155,6 +151,7 @@ public abstract class AbstractSTestS3AHugeFiles extends S3AScaleTestBase {
     // perform the upload.
     // there's lots of logging here, so that a tail -f on the output log
     // can give a view of what is happening.
+    S3AFileSystem fs = getFileSystem();
     StorageStatistics storageStatistics = fs.getStorageStatistics();
     String putRequests = Statistic.OBJECT_PUT_REQUESTS.getSymbol();
     String putBytes = Statistic.OBJECT_PUT_BYTES.getSymbol();
@@ -286,12 +283,13 @@ public abstract class AbstractSTestS3AHugeFiles extends S3AScaleTestBase {
   }
 
   void assumeHugeFileExists() throws IOException {
+    S3AFileSystem fs = getFileSystem();
     ContractTestUtils.assertPathExists(fs, "huge file not created", hugefile);
     ContractTestUtils.assertIsFile(fs, hugefile);
   }
 
   private void logFSState() {
-    LOG.info("File System state after operation:\n{}", fs);
+    LOG.info("File System state after operation:\n{}", getFileSystem());
   }
 
   @Test
@@ -305,6 +303,7 @@ public abstract class AbstractSTestS3AHugeFiles extends S3AScaleTestBase {
     }
     String filetype = encrypted ? "encrypted file" : "file";
     describe("Positioned reads of %s %s", filetype, hugefile);
+    S3AFileSystem fs = getFileSystem();
     S3AFileStatus status = fs.getFileStatus(hugefile);
     long filesize = status.getLen();
     int ops = 0;
@@ -344,6 +343,7 @@ public abstract class AbstractSTestS3AHugeFiles extends S3AScaleTestBase {
   public void test_050_readHugeFile() throws Throwable {
     assumeHugeFileExists();
     describe("Reading %s", hugefile);
+    S3AFileSystem fs = getFileSystem();
     S3AFileStatus status = fs.getFileStatus(hugefile);
     long filesize = status.getLen();
     long blocks = filesize / uploadBlockSize;
@@ -369,6 +369,7 @@ public abstract class AbstractSTestS3AHugeFiles extends S3AScaleTestBase {
   public void test_100_renameHugeFile() throws Throwable {
     assumeHugeFileExists();
     describe("renaming %s to %s", hugefile, hugefileRenamed);
+    S3AFileSystem fs = getFileSystem();
     S3AFileStatus status = fs.getFileStatus(hugefile);
     long filesize = status.getLen();
     fs.delete(hugefileRenamed, false);
@@ -396,7 +397,7 @@ public abstract class AbstractSTestS3AHugeFiles extends S3AScaleTestBase {
   public void test_999_DeleteHugeFiles() throws IOException {
     deleteHugeFile();
     ContractTestUtils.NanoTimer timer2 = new ContractTestUtils.NanoTimer();
-
+    S3AFileSystem fs = getFileSystem();
     fs.delete(hugefileRenamed, false);
     timer2.end("time to delete %s", hugefileRenamed);
     ContractTestUtils.rm(fs, getTestPath(), true, true);
@@ -405,7 +406,7 @@ public abstract class AbstractSTestS3AHugeFiles extends S3AScaleTestBase {
   protected void deleteHugeFile() throws IOException {
     describe("Deleting %s", hugefile);
     NanoTimer timer = new NanoTimer();
-    fs.delete(hugefile, false);
+    getFileSystem().delete(hugefile, false);
     timer.end("time to delete %s", hugefile);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java
index a375664..10dfa65 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java
@@ -20,9 +20,6 @@ package org.apache.hadoop.fs.s3a.scale;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.s3a.Constants;
-import org.junit.Test;
-
-import java.io.IOException;
 
 /**
  * Tests file deletion with multi-delete disabled.
@@ -30,15 +27,10 @@ import java.io.IOException;
 public class ITestS3ADeleteFilesOneByOne extends ITestS3ADeleteManyFiles {
 
   @Override
-  protected Configuration createConfiguration() {
-    Configuration configuration = super.createConfiguration();
+  protected Configuration createScaleConfiguration() {
+    Configuration configuration = super.createScaleConfiguration();
     configuration.setBoolean(Constants.ENABLE_MULTI_DELETE, false);
     return configuration;
   }
 
-  @Override
-  @Test
-  public void testOpenCreate() throws IOException {
-
-  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java
index 4e1a734..d4b6dd9 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.fs.s3a.scale;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.apache.hadoop.fs.s3a.S3AFileSystem;
+
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -50,12 +52,12 @@ public class ITestS3ADeleteManyFiles extends S3AScaleTestBase {
    */
   @Test
   public void testBulkRenameAndDelete() throws Throwable {
-    final Path scaleTestDir = getTestPath();
+    final Path scaleTestDir = path("testBulkRenameAndDelete");
     final Path srcDir = new Path(scaleTestDir, "src");
     final Path finalDir = new Path(scaleTestDir, "final");
     final long count = getOperationCount();
+    final S3AFileSystem fs = getFileSystem();
     ContractTestUtils.rm(fs, scaleTestDir, true, false);
-
     fs.mkdirs(srcDir);
     fs.mkdirs(finalDir);
 
@@ -114,11 +116,4 @@ public class ITestS3ADeleteManyFiles extends S3AScaleTestBase {
     ContractTestUtils.assertDeleted(fs, finalDir, true, false);
   }
 
-  @Test
-  public void testOpenCreate() throws IOException {
-    final Path scaleTestDir = getTestPath();
-    final Path srcDir = new Path(scaleTestDir, "opencreate");
-    ContractTestUtils.createAndVerifyFile(fs, srcDir, 1024);
-    ContractTestUtils.createAndVerifyFile(fs, srcDir, 50 * 1024);
-  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java
index b5f4eb3..d71364f 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.s3a.scale;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.s3a.S3AFileSystem;
 import org.apache.hadoop.fs.s3a.Statistic;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -40,8 +41,9 @@ public class ITestS3ADirectoryPerformance extends S3AScaleTestBase {
   @Test
   public void testListOperations() throws Throwable {
     describe("Test recursive list operations");
-    final Path scaleTestDir = getTestPath();
+    final Path scaleTestDir = path("testListOperations");
     final Path listDir = new Path(scaleTestDir, "lists");
+    S3AFileSystem fs = getFileSystem();
 
     // scale factor.
     int scale = getConf().getInt(KEY_DIRECTORY_COUNT, DEFAULT_DIRECTORY_COUNT);
@@ -137,15 +139,16 @@ public class ITestS3ADirectoryPerformance extends S3AScaleTestBase {
   @Test
   public void testTimeToStatEmptyDirectory() throws Throwable {
     describe("Time to stat an empty directory");
-    Path path = new Path(getTestPath(), "empty");
-    fs.mkdirs(path);
+    Path path = path("empty");
+    getFileSystem().mkdirs(path);
     timeToStatPath(path);
   }
 
   @Test
   public void testTimeToStatNonEmptyDirectory() throws Throwable {
     describe("Time to stat a non-empty directory");
-    Path path = new Path(getTestPath(), "dir");
+    Path path = path("dir");
+    S3AFileSystem fs = getFileSystem();
     fs.mkdirs(path);
     touch(fs, new Path(path, "file"));
     timeToStatPath(path);
@@ -154,8 +157,8 @@ public class ITestS3ADirectoryPerformance extends S3AScaleTestBase {
   @Test
   public void testTimeToStatFile() throws Throwable {
     describe("Time to stat a simple file");
-    Path path = new Path(getTestPath(), "file");
-    touch(fs, path);
+    Path path = path("file");
+    touch(getFileSystem(), path);
     timeToStatPath(path);
   }
 
@@ -167,6 +170,7 @@ public class ITestS3ADirectoryPerformance extends S3AScaleTestBase {
 
   private void timeToStatPath(Path path) throws IOException {
     describe("Timing getFileStatus(\"%s\")", path);
+    S3AFileSystem fs = getFileSystem();
     MetricDiff metadataRequests =
         new MetricDiff(fs, Statistic.OBJECT_METADATA_REQUESTS);
     MetricDiff listRequests =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesClassicOutput.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesClassicOutput.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesClassicOutput.java
index 45eef24..551956b 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesClassicOutput.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesClassicOutput.java
@@ -29,8 +29,8 @@ import org.apache.hadoop.fs.s3a.Constants;
 public class ITestS3AHugeFilesClassicOutput extends AbstractSTestS3AHugeFiles {
 
   @Override
-  protected Configuration createConfiguration() {
-    final Configuration conf = super.createConfiguration();
+  protected Configuration createScaleConfiguration() {
+    final Configuration conf = super.createScaleConfiguration();
     conf.setBoolean(Constants.FAST_UPLOAD, false);
     return conf;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java
index e2163c5..cc8187e 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AInputStreamPerformance.java
@@ -436,7 +436,8 @@ public class ITestS3AInputStreamPerformance extends S3AScaleTestBase {
     describe("read over a buffer, making sure that the requests" +
         " spans readahead ranges");
     int datasetLen = _32K;
-    Path dataFile = new Path(getTestPath(), "testReadOverBuffer.bin");
+    S3AFileSystem fs = getFileSystem();
+    Path dataFile = path("testReadOverBuffer.bin");
     byte[] sourceData = dataset(datasetLen, 0, 64);
     // relies on the field 'fs' referring to the R/W FS
     writeDataset(fs, dataFile, sourceData, datasetLen, _16K, true);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/S3AScaleTestBase.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/S3AScaleTestBase.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/S3AScaleTestBase.java
index af6d468..c4174bf 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/S3AScaleTestBase.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/S3AScaleTestBase.java
@@ -21,20 +21,15 @@ package org.apache.hadoop.fs.s3a.scale;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.apache.hadoop.fs.s3a.S3AFileSystem;
+import org.apache.hadoop.fs.s3a.AbstractS3ATestBase;
 import org.apache.hadoop.fs.s3a.S3AInputStream;
 import org.apache.hadoop.fs.s3a.S3AInstrumentation;
 import org.apache.hadoop.fs.s3a.S3ATestConstants;
 import org.apache.hadoop.fs.s3a.Statistic;
 import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
-import org.junit.After;
+
 import org.junit.Assert;
 import org.junit.Assume;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.rules.TestName;
-import org.junit.rules.Timeout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -45,25 +40,35 @@ import static org.apache.hadoop.fs.s3a.S3ATestUtils.*;
 /**
  * Base class for scale tests; here is where the common scale configuration
  * keys are defined.
+ * <p>
+ * Configuration setup is a bit more complex than in the parent classes,
+ * as the configuration is needed by the time {@link #getTestTimeoutMillis()}
+ * is called to set the test timeout rule; this happens before any of
+ * the methods tagged with {@code @Before} are invoked.
+ * <p>
+ * The algorithm is:
+ * <ol>
+ *   <li>Create a configuration on demand, via
+ *   {@link #demandCreateConfiguration()}</li>
+ *   <li>Have that return the value of {@link #conf} or create a new one
+ *   if that field is null (and set the field to the created value).</li>
+ *   <li>Override the superclass's {@link #createConfiguration()}
+ *   to return the demand-created value; make that method final so that
+ *   subclasses don't break things by overriding it.</li>
+ *   <li>Add a new override point {@link #createScaleConfiguration()}
+ *   to create the config, one which subclasses can (and do) override.</li>
+ * </ol>
+ * Bear in mind that this process also takes place during initialization
+ * of the superclass; the overridden methods are being invoked before
+ * their instances are fully configured. This is considered
+ * <i>very bad form</i> in Java code (in C++, by contrast, such calls
+ * dispatch to the base class implementations instead).
  */
-public class S3AScaleTestBase extends Assert implements S3ATestConstants {
-
-  @Rule
-  public final TestName methodName = new TestName();
-
-  @Rule
-  public Timeout testTimeout = createTestTimeout();
-
-  @Before
-  public void nameThread() {
-    Thread.currentThread().setName("JUnit");
-  }
+public class S3AScaleTestBase extends AbstractS3ATestBase {
 
   public static final int _1KB = 1024;
   public static final int _1MB = _1KB * _1KB;
 
-  protected S3AFileSystem fs;
-
   protected static final Logger LOG =
       LoggerFactory.getLogger(S3AScaleTestBase.class);
 
@@ -71,14 +76,8 @@ public class S3AScaleTestBase extends Assert implements S3ATestConstants {
 
   private boolean enabled;
 
-  /**
-   * Configuration generator. May be overridden to inject
-   * some custom options.
-   * @return a configuration with which to create FS instances
-   */
-  protected Configuration createConfiguration() {
-    return new Configuration();
-  }
+
+  private Path testPath;
 
   /**
    * Get the configuration used to set up the FS.
@@ -88,44 +87,53 @@ public class S3AScaleTestBase extends Assert implements S3ATestConstants {
     return conf;
   }
 
-  /**
-   * Setup. This triggers creation of the configuration.
-   */
-  @Before
-  public void setUp() throws Exception {
-    demandCreateConfiguration();
+  @Override
+  public void setup() throws Exception {
+    super.setup();
+    testPath = path("/tests3ascale");
     LOG.debug("Scale test operation count = {}", getOperationCount());
     // multipart purges are disabled on the scale tests
-    fs = createTestFileSystem(conf, false);
     // check for the test being enabled
     enabled = getTestPropertyBool(
         getConf(),
         KEY_SCALE_TESTS_ENABLED,
         DEFAULT_SCALE_TESTS_ENABLED);
     Assume.assumeTrue("Scale test disabled: to enable set property " +
-        KEY_SCALE_TESTS_ENABLED, enabled);
+        KEY_SCALE_TESTS_ENABLED, isEnabled());
   }
 
   /**
-   * Create the configuration if it is not already set up.
+   * Create the configuration if it is not already set up, calling
+   * {@link #createScaleConfiguration()} to do so.
    * @return the configuration.
    */
   private synchronized Configuration demandCreateConfiguration() {
     if (conf == null) {
-      conf = createConfiguration();
+      conf = createScaleConfiguration();
     }
     return conf;
   }
 
-  @After
-  public void tearDown() throws Exception {
-    ContractTestUtils.rm(fs, getTestPath(), true, true);
+  /**
+   * Returns the config created with {@link #demandCreateConfiguration()}.
+   * Subclasses must override {@link #createScaleConfiguration()}
+   * in order to customize their configurations.
+   * @return a configuration with which to create FS instances
+   */
+  protected final Configuration createConfiguration() {
+    return demandCreateConfiguration();
+  }
+
+  /**
+   * Override point: create a configuration.
+   * @return a configuration with which to create FS instances
+   */
+  protected Configuration createScaleConfiguration() {
+    return new Configuration();
   }
 
   protected Path getTestPath() {
-    String testUniqueForkId = System.getProperty("test.unique.fork.id");
-    return testUniqueForkId == null ? new Path("/tests3a") :
-        new Path("/" + testUniqueForkId, "tests3a");
+    return testPath;
   }
 
   protected long getOperationCount() {
@@ -133,34 +141,18 @@ public class S3AScaleTestBase extends Assert implements S3ATestConstants {
   }
 
   /**
-   * Create the timeout for tests. Some large tests may need a larger value.
-   * @return the test timeout to use
-   */
-  protected Timeout createTestTimeout() {
-    demandCreateConfiguration();
-    return new Timeout(
-        getTestTimeoutSeconds() * 1000);
-  }
-
-  /**
    * Get the test timeout in seconds.
    * @return the test timeout as set in system properties or the default.
    */
-  protected static int getTestTimeoutSeconds() {
-    return getTestPropertyInt(null,
+  protected int getTestTimeoutSeconds() {
+    return getTestPropertyInt(demandCreateConfiguration(),
         KEY_TEST_TIMEOUT,
-        DEFAULT_TEST_TIMEOUT);
+        SCALE_TEST_TIMEOUT_SECONDS);
   }
 
-  /**
-   * Describe a test in the logs.
-   * @param text text to print
-   * @param args arguments to format in the printing
-   */
-  protected void describe(String text, Object... args) {
-    LOG.info("\n\n{}: {}\n",
-        methodName.getMethodName(),
-        String.format(text, args));
+  @Override
+  protected int getTestTimeoutMillis() {
+    return getTestTimeoutSeconds() * 1000;
   }
 
   /**
@@ -189,20 +181,25 @@ public class S3AScaleTestBase extends Assert implements S3ATestConstants {
    * @return the value.
    */
   public long gaugeValue(Statistic statistic) {
-    S3AInstrumentation instrumentation = fs.getInstrumentation();
+    S3AInstrumentation instrumentation = getFileSystem().getInstrumentation();
     MutableGaugeLong gauge = instrumentation.lookupGauge(statistic.getSymbol());
     assertNotNull("No gauge " + statistic
         + " in " + instrumentation.dump("", " = ", "\n", true), gauge);
     return gauge.value();
   }
 
-  protected boolean isEnabled() {
+  /**
+   * Is the test enabled? This is controlled by the configuration
+   * and the {@code -Dscale} Maven option.
+   * @return true if the scale tests are enabled.
+   */
+  protected final boolean isEnabled() {
     return enabled;
   }
 
   /**
-   * Flag to indicate that this test is being used sequentially. This
-   * is used by some of the scale tests to validate test time expectations.
+   * Flag to indicate that this test is being executed in parallel.
+   * This is used by some of the scale tests to validate test time expectations.
    * @return true if the build indicates this test is being run in parallel.
    */
   protected boolean isParallelExecution() {
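
With the reworked base class, a scale test customizes its configuration through the new
override point instead of shadowing createConfiguration(), which is now final. The following
is a sketch of what such a subclass could look like; the class name, the tuned option and
the test body are invented examples, while createScaleConfiguration(), getTestPath(),
getFileSystem() and _1MB come from the classes in this patch, and describe() is assumed to
be inherited from the new AbstractS3ATestBase parent (which is why it is removed from the
scale base above).

package org.apache.hadoop.fs.s3a.scale;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.junit.Test;

// Sketch of a subclass under the reworked base class.
public class ITestS3AExampleScale extends S3AScaleTestBase {

  @Override
  protected Configuration createScaleConfiguration() {
    // Demand-created by the base class before the timeout rule needs it.
    Configuration conf = super.createScaleConfiguration();
    // Example tuning only; any S3A option could be set here.
    conf.setLong("fs.s3a.multipart.size", 8 * _1MB);
    return conf;
  }

  @Test
  public void testExample() throws Exception {
    describe("example scale test under %s", getTestPath());
    // Touch a file under the per-fork test path set up in setup().
    ContractTestUtils.touch(getFileSystem(),
        new Path(getTestPath(), "example"));
  }
}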

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3A.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3A.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3A.java
index ca57da6..7d2c1dc 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3A.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3A.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -59,7 +59,7 @@ public class ITestS3A {
   }
 
   protected Path getTestPath() {
-    return new Path("/tests3afc");
+    return S3ATestUtils.createTestPath(new Path("/tests3afc"));
   }
 
   @Test
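
S3ATestUtils.createTestPath() gives each parallel Maven fork its own directory in the target
bucket, the same per-fork logic that the removed S3AScaleTestBase.getTestPath() carried
inline. A sketch of what such a helper amounts to, derived from that removed code; the
committed S3ATestUtils method may differ in detail:

// Illustrative sketch of a per-fork path helper; not a quote of S3ATestUtils.
public static Path createTestPath(Path defaultPath) {
  String testUniqueForkId = System.getProperty("test.unique.fork.id");
  return testUniqueForkId == null
      ? defaultPath
      : new Path("/" + testUniqueForkId + defaultPath.toUri().getPath());
}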

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67e01f72/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java
index 772d8c7..8421dad 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/yarn/ITestS3AMiniYarnCluster.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with this
  * work for additional information regarding copyright ownership. The ASF
@@ -24,13 +24,13 @@ import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.examples.WordCount;
 import org.apache.hadoop.fs.CreateFlag;
-import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.apache.hadoop.fs.s3a.AbstractS3ATestBase;
 import org.apache.hadoop.fs.s3a.S3AFileSystem;
 import org.apache.hadoop.fs.s3a.S3ATestUtils;
-import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
@@ -39,26 +39,26 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.MiniYARNCluster;
 
-import org.junit.After;
-import static org.junit.Assert.assertTrue;
-import org.junit.Before;
 import org.junit.Test;
-import static org.junit.Assert.assertEquals;
 
 /**
  * Tests that S3A is usable through a YARN application.
  */
-public class ITestS3AMiniYarnCluster {
+public class ITestS3AMiniYarnCluster extends AbstractS3ATestBase {
 
   private final Configuration conf = new YarnConfiguration();
   private S3AFileSystem fs;
   private MiniYARNCluster yarnCluster;
-  private final String rootPath = "/tests/MiniClusterWordCount/";
+  private Path rootPath;
 
-  @Before
-  public void beforeTest() throws IOException {
+  @Override
+  public void setup() throws Exception {
+    super.setup();
     fs = S3ATestUtils.createTestFileSystem(conf);
-    fs.mkdirs(new Path(rootPath + "input/"));
+    rootPath = path("MiniClusterWordCount");
+    Path workingDir = path("working");
+    fs.setWorkingDirectory(workingDir);
+    fs.mkdirs(new Path(rootPath, "input/"));
 
     yarnCluster = new MiniYARNCluster("MiniClusterWordCount", // testName
             1, // number of node managers
@@ -68,17 +68,19 @@ public class ITestS3AMiniYarnCluster {
     yarnCluster.start();
   }
 
-  @After
-  public void afterTest() throws IOException {
-    fs.delete(new Path(rootPath), true);
-    yarnCluster.stop();
+  @Override
+  public void teardown() throws Exception {
+    if (yarnCluster != null) {
+      yarnCluster.stop();
+    }
+    super.teardown();
   }
 
   @Test
   public void testWithMiniCluster() throws Exception {
-    Path input = new Path(rootPath + "input/in.txt");
+    Path input = new Path(rootPath, "input/in.txt");
     input = input.makeQualified(fs.getUri(), fs.getWorkingDirectory());
-    Path output = new Path(rootPath + "output/");
+    Path output = new Path(rootPath, "output/");
     output = output.makeQualified(fs.getUri(), fs.getWorkingDirectory());
 
     writeStringToFile(input, "first line\nsecond line\nthird line");
@@ -134,15 +136,9 @@ public class ITestS3AMiniYarnCluster {
   /**
    * helper method.
    */
-  private String readStringFromFile(Path path) {
-    try (FSDataInputStream in = fs.open(path)) {
-      long bytesLen = fs.getFileStatus(path).getLen();
-      byte[] buffer = new byte[(int) bytesLen];
-      IOUtils.readFully(in, buffer, 0, buffer.length);
-      return new String(buffer);
-    } catch (IOException e) {
-      throw new RuntimeException("Failed to read from [" + path + "]", e);
-    }
+  private String readStringFromFile(Path path) throws IOException {
+    return ContractTestUtils.readBytesToString(fs, path,
+        (int) fs.getFileStatus(path).getLen());
   }
 
 }
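
readStringFromFile() now delegates to ContractTestUtils.readBytesToString() and simply
propagates the IOException. The companion writeStringToFile() helper, which this patch
leaves untouched and which is not shown here, could be slimmed down along the same lines;
a sketch, assuming ContractTestUtils.createFile(FileSystem, Path, boolean, byte[]) and
java.nio.charset.StandardCharsets are available:

// Sketch only; the test's actual writeStringToFile() is unchanged by this patch.
private void writeStringToFile(Path path, String string) throws IOException {
  ContractTestUtils.createFile(fs, path, true,
      string.getBytes(StandardCharsets.UTF_8));
}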

