Posted to common-commits@hadoop.apache.org by ae...@apache.org on 2017/09/18 22:11:49 UTC

[06/50] [abbrv] hadoop git commit: HADOOP-14738 Remove S3N and obsolete bits of S3A; rework docs. Contributed by Steve Loughran.
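
HADOOP-14738 drops the s3n:// connector outright; S3A is the supported
replacement. For anyone still carrying s3n:// URLs, here is a minimal
core-site.xml sketch of the equivalent S3A settings. The fs.s3a.* property
names are the standard ones; the key values and bucket name below are
purely illustrative.

  <property>
    <name>fs.s3a.access.key</name>
    <value>AKASOMEACCESSKEY</value>
  </property>
  <property>
    <name>fs.s3a.secret.key</name>
    <value>YOUR_SECRET_KEY</value>
  </property>

Paths then change scheme only: s3n://mybucket/data becomes s3a://mybucket/data.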

http://git-wip-us.apache.org/repos/asf/hadoop/blob/49467165/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java
deleted file mode 100644
index c082493..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/InMemoryNativeFileSystemStore.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3native;
-
-import static org.apache.hadoop.fs.s3native.NativeS3FileSystem.PATH_DELIMITER;
-import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.S3_NATIVE_BUFFER_DIR_KEY;
-import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.addDeprecatedConfigKeys;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.SortedMap;
-import java.util.SortedSet;
-import java.util.TreeMap;
-import java.util.TreeSet;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.Time;
-
-/**
- * <p>
- * A stub implementation of {@link NativeFileSystemStore} for testing
- * {@link NativeS3FileSystem} without actually connecting to S3.
- * </p>
- */
-public class InMemoryNativeFileSystemStore implements NativeFileSystemStore {
-
-  static {
-    // Add the deprecated config keys
-    addDeprecatedConfigKeys();
-  }
-
-  private Configuration conf;
-  
-  private SortedMap<String, FileMetadata> metadataMap =
-    new TreeMap<String, FileMetadata>();
-  private SortedMap<String, byte[]> dataMap = new TreeMap<String, byte[]>();
-
-  @Override
-  public void initialize(URI uri, Configuration conf) throws IOException {
-    this.conf = conf;
-  }
-
-  @Override
-  public void storeEmptyFile(String key) throws IOException {
-    metadataMap.put(key, new FileMetadata(key, 0, Time.now()));
-    dataMap.put(key, new byte[0]);
-  }
-
-  @Override
-  public void storeFile(String key, File file, byte[] md5Hash)
-    throws IOException {
-    
-    ByteArrayOutputStream out = new ByteArrayOutputStream();
-    byte[] buf = new byte[8192];
-    int numRead;
-    BufferedInputStream in = null;
-    try {
-      in = new BufferedInputStream(new FileInputStream(file));
-      while ((numRead = in.read(buf)) >= 0) {
-        out.write(buf, 0, numRead);
-      }
-    } finally {
-      if (in != null) {
-        in.close();
-      }
-    }
-    metadataMap.put(key,
-        new FileMetadata(key, file.length(), Time.now()));
-    dataMap.put(key, out.toByteArray());
-  }
-
-  @Override
-  public InputStream retrieve(String key) throws IOException {
-    return retrieve(key, 0);
-  }
-  
-  @Override
-  public InputStream retrieve(String key, long byteRangeStart)
-    throws IOException {
-    
-    byte[] data = dataMap.get(key);
-    File file = createTempFile();
-    BufferedOutputStream out = null;
-    try {
-      out = new BufferedOutputStream(new FileOutputStream(file));
-      out.write(data, (int) byteRangeStart,
-          data.length - (int) byteRangeStart);
-    } finally {
-      if (out != null) {
-        out.close();
-      }
-    }
-    return new FileInputStream(file);
-  }
-  
-  private File createTempFile() throws IOException {
-    File dir = new File(conf.get(S3_NATIVE_BUFFER_DIR_KEY));
-    if (!dir.exists() && !dir.mkdirs()) {
-      throw new IOException("Cannot create S3 buffer directory: " + dir);
-    }
-    File result = File.createTempFile("test-", ".tmp", dir);
-    result.deleteOnExit();
-    return result;
-  }
-
-  @Override
-  public FileMetadata retrieveMetadata(String key) throws IOException {
-    return metadataMap.get(key);
-  }
-
-  @Override
-  public PartialListing list(String prefix, int maxListingLength)
-      throws IOException {
-    return list(prefix, maxListingLength, null, false);
-  }
-
-  @Override
-  public PartialListing list(String prefix, int maxListingLength,
-      String priorLastKey, boolean recursive) throws IOException {
-
-    return list(prefix, recursive ? null : PATH_DELIMITER, maxListingLength, priorLastKey);
-  }
-
-  private PartialListing list(String prefix, String delimiter,
-      int maxListingLength, String priorLastKey) throws IOException {
-
-    if (prefix.length() > 0 && !prefix.endsWith(PATH_DELIMITER)) {
-      prefix += PATH_DELIMITER;
-    }
-    
-    List<FileMetadata> metadata = new ArrayList<FileMetadata>();
-    SortedSet<String> commonPrefixes = new TreeSet<String>();
-    for (String key : dataMap.keySet()) {
-      if (key.startsWith(prefix)) {
-        if (delimiter == null) {
-          metadata.add(retrieveMetadata(key));
-        } else {
-          int delimIndex = key.indexOf(delimiter, prefix.length());
-          if (delimIndex == -1) {
-            metadata.add(retrieveMetadata(key));
-          } else {
-            String commonPrefix = key.substring(0, delimIndex);
-            commonPrefixes.add(commonPrefix);
-          }
-        }
-      }
-      if (metadata.size() + commonPrefixes.size() == maxListingLength) {
-        return new PartialListing(key, metadata.toArray(new FileMetadata[0]),
-            commonPrefixes.toArray(new String[0]));
-      }
-    }
-    return new PartialListing(null, metadata.toArray(new FileMetadata[0]),
-        commonPrefixes.toArray(new String[0]));
-  }
-
-  @Override
-  public void delete(String key) throws IOException {
-    metadataMap.remove(key);
-    dataMap.remove(key);
-  }
-
-  @Override
-  public void copy(String srcKey, String dstKey) throws IOException {
-    metadataMap.put(dstKey, metadataMap.get(srcKey));
-    dataMap.put(dstKey, dataMap.get(srcKey));
-  }
-  
-  @Override
-  public void purge(String prefix) throws IOException {
-    Iterator<Entry<String, FileMetadata>> i =
-      metadataMap.entrySet().iterator();
-    while (i.hasNext()) {
-      Entry<String, FileMetadata> entry = i.next();
-      if (entry.getKey().startsWith(prefix)) {
-        dataMap.remove(entry.getKey());
-        i.remove();
-      }
-    }
-  }
-
-  @Override
-  public void dump() throws IOException {
-    System.out.println(metadataMap.values());
-    System.out.println(dataMap.keySet());
-  }
-}
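
The deleted store above is a classic constructor-injection test double:
NativeS3FileSystem accepted a NativeFileSystemStore, so tests could swap the
JetS3t-backed store for two sorted maps and never touch the network. A sketch
of the wiring, assuming it sits in the org.apache.hadoop.fs.s3native package
as the deleted tests did (S3NInMemoryFileSystem, further down in this commit,
packaged the same idea as a reusable class):

  package org.apache.hadoop.fs.s3native;

  import java.net.URI;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.Path;

  // Offline smoke test: every call below hits the in-memory maps, not S3.
  public class S3NStubWiring {
    public static void main(String[] args) throws Exception {
      NativeFileSystemStore store = new InMemoryNativeFileSystemStore();
      NativeS3FileSystem fs = new NativeS3FileSystem(store);
      fs.initialize(URI.create("s3n://test/"), new Configuration());
      fs.mkdirs(new Path("/test/dir"));
      System.out.println(fs.getFileStatus(new Path("/test/dir")).isDirectory());
      fs.close();
    }
  }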

http://git-wip-us.apache.org/repos/asf/hadoop/blob/49467165/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/NativeS3FileSystemContractBaseTest.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/NativeS3FileSystemContractBaseTest.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/NativeS3FileSystemContractBaseTest.java
deleted file mode 100644
index bfbca71..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/NativeS3FileSystemContractBaseTest.java
+++ /dev/null
@@ -1,266 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3native;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystemContractBaseTest;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.s3native.NativeS3FileSystem.NativeS3FsInputStream;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.internal.AssumptionViolatedException;
-import static org.junit.Assert.*;
-
-public abstract class NativeS3FileSystemContractBaseTest
-  extends FileSystemContractBaseTest {
-  public static final String KEY_TEST_FS = "test.fs.s3n.name";
-  private NativeFileSystemStore store;
-  
-  abstract NativeFileSystemStore getNativeFileSystemStore() throws IOException;
-
-  @Before
-  public void setUp() throws Exception {
-    Configuration conf = new Configuration();
-    String fsname = conf.get(KEY_TEST_FS);
-    if (StringUtils.isEmpty(fsname)) {
-      throw new AssumptionViolatedException(
-          "No test FS defined in :" + KEY_TEST_FS);
-    }
-    store = getNativeFileSystemStore();
-    fs = new NativeS3FileSystem(store);
-    fs.initialize(URI.create(fsname), conf);
-  }
-  
-  @After
-  public void tearDown() throws Exception {
-    if (store != null) {
-      store.purge("test");
-    }
-  }
-
-  @Test
-  public void testCanonicalName() throws Exception {
-    assertNull("s3n doesn't support security token and shouldn't have canonical name",
-               fs.getCanonicalServiceName());
-  }
-
-  @Test
-  public void testListStatusForRoot() throws Exception {
-    FileStatus[] paths = fs.listStatus(path("/"));
-    assertEquals("Root directory is not empty; ", 0, paths.length);
-    
-    Path testDir = path("/test");
-    assertTrue(fs.mkdirs(testDir));
-    
-    paths = fs.listStatus(path("/"));
-    assertEquals(1, paths.length);
-    assertEquals(path("/test"), paths[0].getPath());
-  }
-
-  @Test
-  public void testNoTrailingBackslashOnBucket() throws Exception {
-    assertTrue(fs.getFileStatus(new Path(fs.getUri().toString())).isDirectory());
-  }
-
-  private void createTestFiles(String base) throws IOException {
-    store.storeEmptyFile(base + "/file1");
-    store.storeEmptyFile(base + "/dir/file2");
-    store.storeEmptyFile(base + "/dir/file3");
-  }
-
-  @Test
-  public void testDirWithDifferentMarkersWorks() throws Exception {
-
-    for (int i = 0; i <= 3; i++) {
-      String base = "test/hadoop" + i;
-      Path path = path("/" + base);
-
-      createTestFiles(base);
-
-      if (i == 0 ) {
-        //do nothing, we are testing correctness with no markers
-      }
-      else if (i == 1) {
-        // test for _$folder$ marker
-        store.storeEmptyFile(base + "_$folder$");
-        store.storeEmptyFile(base + "/dir_$folder$");
-      }
-      else if (i == 2) {
-        // test the end slash file marker
-        store.storeEmptyFile(base + "/");
-        store.storeEmptyFile(base + "/dir/");
-      }
-      else if (i == 3) {
-        // test both markers
-        store.storeEmptyFile(base + "_$folder$");
-        store.storeEmptyFile(base + "/dir_$folder$");
-        store.storeEmptyFile(base + "/");
-        store.storeEmptyFile(base + "/dir/");
-      }
-
-      assertTrue(fs.getFileStatus(path).isDirectory());
-      assertEquals(2, fs.listStatus(path).length);
-    }
-  }
-
-  @Test
-  public void testDeleteWithNoMarker() throws Exception {
-    String base = "test/hadoop";
-    Path path = path("/" + base);
-
-    createTestFiles(base);
-
-    fs.delete(path, true);
-
-    path = path("/test");
-    assertTrue(fs.getFileStatus(path).isDirectory());
-    assertEquals(0, fs.listStatus(path).length);
-  }
-
-  @Test
-  public void testRenameWithNoMarker() throws Exception {
-    String base = "test/hadoop";
-    Path dest = path("/test/hadoop2");
-
-    createTestFiles(base);
-
-    fs.rename(path("/" + base), dest);
-
-    Path path = path("/test");
-    assertTrue(fs.getFileStatus(path).isDirectory());
-    assertEquals(1, fs.listStatus(path).length);
-    assertTrue(fs.getFileStatus(dest).isDirectory());
-    assertEquals(2, fs.listStatus(dest).length);
-  }
-
-  @Test
-  public void testEmptyFile() throws Exception {
-    store.storeEmptyFile("test/hadoop/file1");
-    fs.open(path("/test/hadoop/file1")).close();
-  }
-
-  @Test
-  public void testBlockSize() throws Exception {
-    Path file = path("/test/hadoop/file");
-    createFile(file);
-    assertEquals("Default block size", fs.getDefaultBlockSize(file),
-    fs.getFileStatus(file).getBlockSize());
-
-    // Block size is determined at read time
-    long newBlockSize = fs.getDefaultBlockSize(file) * 2;
-    fs.getConf().setLong("fs.s3n.block.size", newBlockSize);
-    assertEquals("Double default block size", newBlockSize,
-    fs.getFileStatus(file).getBlockSize());
-  }
-
-  @Test
-  public void testRetryOnIoException() throws Exception {
-    class TestInputStream extends InputStream {
-      boolean shouldThrow = true;
-      int throwCount = 0;
-      int pos = 0;
-      byte[] bytes;
-      boolean threwException = false;
-      
-      public TestInputStream() {
-        bytes = new byte[256];
-        for (int i = pos; i < 256; i++) {
-          bytes[i] = (byte)i;
-        }
-      }
-      
-      @Override
-      public int read() throws IOException {
-        shouldThrow = !shouldThrow;
-        if (shouldThrow) {
-          throwCount++;
-          threwException = true;
-          throw new IOException();
-        }
-        assertFalse("IOException was thrown. InputStream should be reopened", threwException);
-        return pos++;
-      }
-      
-      @Override
-      public int read(byte[] b, int off, int len) throws IOException {
-        shouldThrow = !shouldThrow;
-        if (shouldThrow) {
-          throwCount++;
-          threwException = true;
-          throw new IOException();
-        }
-        assertFalse("IOException was thrown. InputStream should be reopened", threwException);
-        int sizeToRead = Math.min(len, 256 - pos);
-        for (int i = 0; i < sizeToRead; i++) {
-          b[i] = bytes[pos + i];
-        }
-        pos += sizeToRead;
-        return sizeToRead;
-      }
-
-      public void reopenAt(long byteRangeStart) {
-        threwException = false;
-        pos = Long.valueOf(byteRangeStart).intValue();
-      }
-
-    }
-    
-    final TestInputStream is = new TestInputStream();
-    
-    class MockNativeFileSystemStore extends Jets3tNativeFileSystemStore {
-      @Override
-      public InputStream retrieve(String key, long byteRangeStart) throws IOException {
-        is.reopenAt(byteRangeStart);
-        return is;
-      }
-    }
-    
-    NativeS3FsInputStream stream = new NativeS3FsInputStream(new MockNativeFileSystemStore(), null, is, "");
-    
-    // Test reading methods.
-    byte[] result = new byte[256];
-    for (int i = 0; i < 128; i++) {
-      result[i] = (byte)stream.read();
-    }
-    for (int i = 128; i < 256; i += 8) {
-      byte[] temp = new byte[8];
-      int read = stream.read(temp, 0, 8);
-      assertEquals(8, read);
-      System.arraycopy(temp, 0, result, i, 8);
-    }
-    
-    // Assert correct
-    for (int i = 0; i < 256; i++) {
-      assertEquals((byte)i, result[i]);
-    }
-    
-    // Make sure the throw path was exercised: every read was preceded by
-    // one IOException except the very first.
-    // 143 = (128 - 1) + (128 / 8)
-    assertEquals(143, ((TestInputStream)is).throwCount);
-  }
-
-}
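
testRetryOnIoException above pins down the contract of the (also removed)
NativeS3FsInputStream: when a read throws IOException, the wrapper reopens
the underlying store stream at the current offset and retries, which is why
every read in the test except the very first is preceded by exactly one
throw. A schematic reduction of that pattern; the real class carried more
state (key, statistics, close handling), so treat the names and structure
here as illustrative:

  import java.io.IOException;
  import java.io.InputStream;

  // Reopen-and-retry wrapper: one retry per read, resuming at the old offset.
  abstract class RetryingStreamSketch extends InputStream {
    private InputStream in;
    private long pos;

    RetryingStreamSketch(InputStream initial) {
      this.in = initial;
    }

    /** Reopen the source at an absolute offset, e.g. via a ranged GET. */
    protected abstract InputStream reopen(long offset) throws IOException;

    @Override
    public int read() throws IOException {
      try {
        int b = in.read();
        if (b >= 0) {
          pos++;
        }
        return b;
      } catch (IOException e) {
        in = reopen(pos);   // resume where the failed read left off
        int b = in.read();
        if (b >= 0) {
          pos++;
        }
        return b;
      }
    }
  }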

http://git-wip-us.apache.org/repos/asf/hadoop/blob/49467165/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/S3NInMemoryFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/S3NInMemoryFileSystem.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/S3NInMemoryFileSystem.java
deleted file mode 100644
index c0ea85b..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/S3NInMemoryFileSystem.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3native;
-
-import org.apache.hadoop.fs.s3native.NativeS3FileSystem;
-import org.apache.hadoop.fs.s3native.InMemoryNativeFileSystemStore;
-
-/**
- * A helper implementation of {@link NativeS3FileSystem}
- * without actually connecting to S3 for unit testing.
- */
-public class S3NInMemoryFileSystem extends NativeS3FileSystem {
-    public S3NInMemoryFileSystem() {
-        super(new InMemoryNativeFileSystemStore());
-    }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/49467165/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestS3Credentials.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestS3Credentials.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestS3Credentials.java
deleted file mode 100644
index 17b78c7..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestS3Credentials.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.fs.s3native;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.ProviderUtils;
-import org.apache.hadoop.security.alias.CredentialProvider;
-import org.apache.hadoop.security.alias.CredentialProviderFactory;
-
-import java.io.File;
-import java.net.URI;
-
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-import org.junit.rules.TestName;
-
-import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.S3_NATIVE_AWS_ACCESS_KEY_ID;
-import static org.apache.hadoop.fs.s3native.S3NativeFileSystemConfigKeys.S3_NATIVE_AWS_SECRET_ACCESS_KEY;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-/**
- * This is to test the {@link S3Credentials} class for extracting AWS
- * credentials.
- */
-public class TestS3Credentials {
-  public static final Log LOG = LogFactory.getLog(TestS3Credentials.class);
-
-  @Rule
-  public final TestName test = new TestName();
-
-  @Before
-  public void announce() {
-    LOG.info("Running test " + test.getMethodName());
-  }
-
-  private static final String EXAMPLE_ID = "AKASOMEACCESSKEY";
-  private static final String EXAMPLE_KEY =
-      "RGV0cm9pdCBSZ/WQgY2xl/YW5lZCB1cAEXAMPLE";
-
-  @Test
-  public void testInvalidHostnameWithUnderscores() throws Exception {
-    S3Credentials s3Credentials = new S3Credentials();
-    try {
-      s3Credentials.initialize(new URI("s3n://a:b@c_d"), new Configuration());
-      fail("Should throw IllegalArgumentException");
-    } catch (IllegalArgumentException e) {
-      assertEquals("Invalid hostname in URI s3n://a:b@c_d", e.getMessage());
-    }
-  }
-
-  @Test
-  public void testPlaintextConfigPassword() throws Exception {
-    S3Credentials s3Credentials = new S3Credentials();
-    Configuration conf = new Configuration();
-    conf.set(S3_NATIVE_AWS_ACCESS_KEY_ID, EXAMPLE_ID);
-    conf.set(S3_NATIVE_AWS_SECRET_ACCESS_KEY, EXAMPLE_KEY);
-    s3Credentials.initialize(new URI("s3n://foobar"), conf);
-    assertEquals("Could not retrieve proper access key", EXAMPLE_ID,
-        s3Credentials.getAccessKey());
-    assertEquals("Could not retrieve proper secret", EXAMPLE_KEY,
-        s3Credentials.getSecretAccessKey());
-  }
-
-  @Test
-  public void testPlaintextConfigPasswordWithWhitespace() throws Exception {
-    S3Credentials s3Credentials = new S3Credentials();
-    Configuration conf = new Configuration();
-    conf.set(S3_NATIVE_AWS_ACCESS_KEY_ID, "\r\n " + EXAMPLE_ID +
-        " \r\n");
-    conf.set(S3_NATIVE_AWS_SECRET_ACCESS_KEY, "\r\n " + EXAMPLE_KEY +
-        " \r\n");
-    s3Credentials.initialize(new URI("s3n://foobar"), conf);
-    assertEquals("Could not retrieve proper access key", EXAMPLE_ID,
-        s3Credentials.getAccessKey());
-    assertEquals("Could not retrieve proper secret", EXAMPLE_KEY,
-        s3Credentials.getSecretAccessKey());
-  }
-
-  @Rule
-  public final TemporaryFolder tempDir = new TemporaryFolder();
-
-  @Test
-  public void testCredentialProvider() throws Exception {
-    // set up conf to have a cred provider
-    final Configuration conf = new Configuration();
-    final File file = tempDir.newFile("test.jks");
-    final URI jks = ProviderUtils.nestURIForLocalJavaKeyStoreProvider(
-        file.toURI());
-    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
-        jks.toString());
-
-    // add our creds to the provider
-    final CredentialProvider provider =
-        CredentialProviderFactory.getProviders(conf).get(0);
-    provider.createCredentialEntry(S3_NATIVE_AWS_SECRET_ACCESS_KEY,
-        EXAMPLE_KEY.toCharArray());
-    provider.flush();
-
-    // make sure S3Creds can retrieve things.
-    S3Credentials s3Credentials = new S3Credentials();
-    conf.set(S3_NATIVE_AWS_ACCESS_KEY_ID, EXAMPLE_ID);
-    s3Credentials.initialize(new URI("s3n://foobar"), conf);
-    assertEquals("Could not retrieve proper access key", EXAMPLE_ID,
-        s3Credentials.getAccessKey());
-    assertEquals("Could not retrieve proper secret", EXAMPLE_KEY,
-        s3Credentials.getSecretAccessKey());
-  }
-
-}
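
testCredentialProvider exercises the mechanism that outlived S3N: secrets go
into a keystore referenced by hadoop.security.credential.provider.path (the
string behind CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH), and
Configuration.getPassword() consults the providers before falling back to
the plain config value. A sketch, with an illustrative keystore path and an
S3A-era alias:

  import java.io.IOException;

  import org.apache.hadoop.conf.Configuration;

  public class CredentialLookupSketch {
    public static void main(String[] args) throws IOException {
      Configuration conf = new Configuration();
      // jceks://file... points at a local Java keystore holding the secret.
      conf.set("hadoop.security.credential.provider.path",
          "jceks://file/tmp/test.jks");
      // Tries the provider first, then conf.get("fs.s3a.secret.key").
      char[] secret = conf.getPassword("fs.s3a.secret.key");
      if (secret != null) {
        System.out.println("resolved a secret of length " + secret.length);
      }
    }
  }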

http://git-wip-us.apache.org/repos/asf/hadoop/blob/49467165/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestS3NInMemoryFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestS3NInMemoryFileSystem.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestS3NInMemoryFileSystem.java
deleted file mode 100644
index b457df2..0000000
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/TestS3NInMemoryFileSystem.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs.s3native;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URI;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.Path;
-
-public class TestS3NInMemoryFileSystem extends TestCase {
-
-  private static final String TEST_PATH = "s3n://test/data.txt";
-  
-  private static final String TEST_DATA = "Sample data for testing.";
-  
-  private S3NInMemoryFileSystem fs;
-  
-  @Override
-  public void setUp() throws IOException {
-    fs = new S3NInMemoryFileSystem();
-    fs.initialize(URI.create("s3n://test/"), new Configuration());
-  }
- 
-  public void testBasicReadWriteIO() throws IOException {
-    FSDataOutputStream writeData = fs.create(new Path(TEST_PATH));
-    writeData.write(TEST_DATA.getBytes());
-    writeData.flush();
-    writeData.close();
-    
-    FSDataInputStream readData = fs.open(new Path(TEST_PATH));
-    BufferedReader br = new BufferedReader(new InputStreamReader(readData));
-    String line = "";
-    StringBuffer stringBuffer = new StringBuffer();
-    while ((line = br.readLine()) != null) {
-        stringBuffer.append(line);
-    }
-    br.close();
-    
-    assertEquals(TEST_DATA, stringBuffer.toString());
-  }
-  
-  @Override
-  public void tearDown() throws IOException {
-    fs.close();  
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/49467165/hadoop-tools/hadoop-aws/src/test/resources/contract/s3n.xml
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/resources/contract/s3n.xml b/hadoop-tools/hadoop-aws/src/test/resources/contract/s3n.xml
deleted file mode 100644
index 0c6b8c6..0000000
--- a/hadoop-tools/hadoop-aws/src/test/resources/contract/s3n.xml
+++ /dev/null
@@ -1,110 +0,0 @@
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~  or more contributor license agreements.  See the NOTICE file
-  ~  distributed with this work for additional information
-  ~  regarding copyright ownership.  The ASF licenses this file
-  ~  to you under the Apache License, Version 2.0 (the
-  ~  "License"); you may not use this file except in compliance
-  ~  with the License.  You may obtain a copy of the License at
-  ~
-  ~       http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~  Unless required by applicable law or agreed to in writing, software
-  ~  distributed under the License is distributed on an "AS IS" BASIS,
-  ~  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~  See the License for the specific language governing permissions and
-  ~  limitations under the License.
-  -->
-
-<configuration>
-  <!--
-  S3N is a blobstore, with very different behavior than a
-  classic filesystem.
-  -->
-
-  <property>
-    <name>fs.contract.test.root-tests-enabled</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>fs.contract.test.random-seek-count</name>
-    <value>10</value>
-  </property>
-
-  <property>
-    <name>fs.contract.is-blobstore</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>fs.contract.create-overwrites-directory</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>fs.contract.create-visibility-delayed</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>fs.contract.is-case-sensitive</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>fs.contract.rename-returns-false-if-source-missing</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>fs.contract.supports-append</name>
-    <value>false</value>
-  </property>
-
-  <property>
-    <name>fs.contract.supports-atomic-directory-delete</name>
-    <value>false</value>
-  </property>
-
-  <property>
-    <name>fs.contract.supports-atomic-rename</name>
-    <value>false</value>
-  </property>
-
-  <property>
-    <name>fs.contract.supports-block-locality</name>
-    <value>false</value>
-  </property>
-
-  <property>
-    <name>fs.contract.supports-concat</name>
-    <value>false</value>
-  </property>
-
-  <property>
-    <name>fs.contract.supports-seek</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>fs.contract.supports-seek-on-closed-file</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>fs.contract.rejects-seek-past-eof</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>fs.contract.supports-strict-exceptions</name>
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>fs.contract.supports-unix-permissions</name>
-    <value>false</value>
-  </property>
-
-</configuration>
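
Each fs.contract.* key above toggles a family of assertions in the shared
filesystem contract suite rather than configuring the store itself; deleting
s3n.xml retires S3N's declared behavior profile. A minimal sketch of how such
a flag is read, assuming the XML file is on the test classpath (the class
name here is hypothetical):

  import org.apache.hadoop.conf.Configuration;

  public class ContractFlagSketch {
    public static void main(String[] args) {
      Configuration conf = new Configuration(false);
      conf.addResource("contract/s3n.xml");   // load the declarations above
      boolean atomicRename =
          conf.getBoolean("fs.contract.supports-atomic-rename", false);
      // A contract test would skip or relax rename-atomicity checks here.
      System.out.println("atomic rename supported: " + atomicRename);
    }
  }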

http://git-wip-us.apache.org/repos/asf/hadoop/blob/49467165/hadoop-tools/hadoop-aws/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/resources/log4j.properties b/hadoop-tools/hadoop-aws/src/test/resources/log4j.properties
index 9376ebd..acbe7f1 100644
--- a/hadoop-tools/hadoop-aws/src/test/resources/log4j.properties
+++ b/hadoop-tools/hadoop-aws/src/test/resources/log4j.properties
@@ -32,3 +32,6 @@ log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
 # Log all HTTP requests made; includes S3 interaction. This may
 # include sensitive information such as account IDs in HTTP headers.
 #log4j.logger.com.amazonaws.request=DEBUG
+
+# Turn on low level HTTP protocol debugging
+#log4j.logger.com.amazonaws.thirdparty.apache.http=DEBUG


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org