Posted to common-commits@hadoop.apache.org by jh...@apache.org on 2017/09/28 02:55:09 UTC

[18/50] [abbrv] hadoop git commit: HADOOP-14220 Enhance S3GuardTool with bucket-info and set-capacity commands, tests. Contributed by Steve Loughran
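
As context for the patch: a minimal sketch of how the two new subcommands might be invoked from the command line, assuming the usual "hadoop s3guard" entry point and using only the command names and flags exercised by the tests below (the bucket name is a placeholder):

    hadoop s3guard bucket-info -unguarded s3a://example-bucket/
    hadoop s3guard set-capacity -read 100 -write 100 s3a://example-bucket/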

http://git-wip-us.apache.org/repos/asf/hadoop/blob/47011d7d/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestS3GuardToolLocal.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestS3GuardToolLocal.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestS3GuardToolLocal.java
index 181cdfb..43cbe93 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestS3GuardToolLocal.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestS3GuardToolLocal.java
@@ -21,11 +21,12 @@ package org.apache.hadoop.fs.s3a.s3guard;
 import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStreamReader;
-import java.io.PrintStream;
 import java.util.HashSet;
 import java.util.Set;
+import java.util.concurrent.Callable;
 
 import org.junit.Test;
 
@@ -34,13 +35,16 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.s3a.S3AFileSystem;
 import org.apache.hadoop.fs.s3a.s3guard.S3GuardTool.Diff;
 
-import static org.apache.hadoop.fs.s3a.s3guard.S3GuardTool.SUCCESS;
+import static org.apache.hadoop.fs.s3a.s3guard.S3GuardTool.*;
+import static org.apache.hadoop.test.LambdaTestUtils.intercept;
 
 /**
  * Test S3Guard related CLI commands against a LocalMetadataStore.
  */
 public class ITestS3GuardToolLocal extends AbstractS3GuardToolTestBase {
 
+  private static final String LOCAL_METADATA = "local://metadata";
+
   @Override
   protected MetadataStore newMetadataStore() {
     return new LocalMetadataStore();
@@ -65,10 +69,7 @@ public class ITestS3GuardToolLocal extends AbstractS3GuardToolTestBase {
 
     S3GuardTool.Import cmd = new S3GuardTool.Import(fs.getConf());
     cmd.setStore(ms);
-
-    expectSuccess("Import command did not exit successfully - see output",
-        cmd,
-        "import", parent.toString());
+    exec(cmd, "import", parent.toString());
 
     DirListingMetadata children =
         ms.listChildren(dir);
@@ -80,7 +81,7 @@ public class ITestS3GuardToolLocal extends AbstractS3GuardToolTestBase {
   }
 
   @Test
-  public void testDiffCommand() throws IOException {
+  public void testDiffCommand() throws Exception {
     S3AFileSystem fs = getFileSystem();
     MetadataStore ms = getMetadataStore();
     Set<Path> filesOnS3 = new HashSet<>(); // files on S3.
@@ -108,13 +109,10 @@ public class ITestS3GuardToolLocal extends AbstractS3GuardToolTestBase {
     }
 
     ByteArrayOutputStream buf = new ByteArrayOutputStream();
-    PrintStream out = new PrintStream(buf);
     Diff cmd = new Diff(fs.getConf());
     cmd.setStore(ms);
-    assertEquals("Diff command did not exit successfully - see output", SUCCESS,
-        cmd.run(new String[]{"diff", "-meta", "local://metadata",
-            testPath.toString()}, out));
-    out.close();
+    exec(cmd, buf, "diff", "-meta", LOCAL_METADATA,
+            testPath.toString());
 
     Set<Path> actualOnS3 = new HashSet<>();
     Set<Path> actualOnMS = new HashSet<>();
@@ -140,10 +138,128 @@ public class ITestS3GuardToolLocal extends AbstractS3GuardToolTestBase {
         }
       }
     }
-    String actualOut = out.toString();
+    String actualOut = buf.toString();
     assertEquals("Mismatched metadata store outputs: " + actualOut,
         filesOnMS, actualOnMS);
     assertEquals("Mismatched s3 outputs: " + actualOut, filesOnS3, actualOnS3);
     assertFalse("Diff contained duplicates", duplicates);
   }
+
+  @Test
+  public void testDestroyBucketExistsButNoTable() throws Throwable {
+    run(Destroy.NAME,
+        "-meta", LOCAL_METADATA,
+        getLandsatCSVFile());
+  }
+
+  @Test
+  public void testImportNoFilesystem() throws Throwable {
+    final Import importer =
+        new S3GuardTool.Import(getConfiguration());
+    importer.setStore(getMetadataStore());
+    intercept(IOException.class,
+        new Callable<Integer>() {
+          @Override
+          public Integer call() throws Exception {
+            return importer.run(
+                new String[]{
+                    "import",
+                    "-meta", LOCAL_METADATA,
+                    S3A_THIS_BUCKET_DOES_NOT_EXIST
+                });
+          }
+        });
+  }
+
+  @Test
+  public void testInfoBucketAndRegionNoFS() throws Throwable {
+    intercept(FileNotFoundException.class,
+        new Callable<Integer>() {
+          @Override
+          public Integer call() throws Exception {
+            return run(BucketInfo.NAME, "-meta",
+                LOCAL_METADATA, "-region",
+                "any-region", S3A_THIS_BUCKET_DOES_NOT_EXIST);
+          }
+        });
+  }
+
+  @Test
+  public void testInitNegativeRead() throws Throwable {
+    runToFailure(INVALID_ARGUMENT,
+        Init.NAME, "-meta", LOCAL_METADATA, "-region",
+        "eu-west-1",
+        READ_FLAG, "-10");
+  }
+
+  @Test
+  public void testInit() throws Throwable {
+    run(Init.NAME,
+        "-meta", LOCAL_METADATA,
+        "-region", "us-west-1");
+  }
+
+  @Test
+  public void testInitTwice() throws Throwable {
+    run(Init.NAME,
+        "-meta", LOCAL_METADATA,
+        "-region", "us-west-1");
+    run(Init.NAME,
+        "-meta", LOCAL_METADATA,
+        "-region", "us-west-1");
+  }
+
+  @Test
+  public void testLandsatBucketUnguarded() throws Throwable {
+    run(BucketInfo.NAME,
+        "-" + BucketInfo.UNGUARDED_FLAG,
+        getLandsatCSVFile());
+  }
+
+  @Test
+  public void testLandsatBucketRequireGuarded() throws Throwable {
+    runToFailure(E_BAD_STATE,
+        BucketInfo.NAME,
+        "-" + BucketInfo.GUARDED_FLAG,
+        ITestS3GuardToolLocal.this.getLandsatCSVFile());
+  }
+
+  @Test
+  public void testLandsatBucketRequireUnencrypted() throws Throwable {
+    run(BucketInfo.NAME,
+        "-" + BucketInfo.ENCRYPTION_FLAG, "none",
+        getLandsatCSVFile());
+  }
+
+  @Test
+  public void testLandsatBucketRequireEncrypted() throws Throwable {
+    runToFailure(E_BAD_STATE,
+        BucketInfo.NAME,
+        "-" + BucketInfo.ENCRYPTION_FLAG,
+        "AES256", ITestS3GuardToolLocal.this.getLandsatCSVFile());
+  }
+
+  @Test
+  public void testStoreInfo() throws Throwable {
+    S3GuardTool.BucketInfo cmd = new S3GuardTool.BucketInfo(
+        getFileSystem().getConf());
+    cmd.setStore(getMetadataStore());
+    String output = exec(cmd, cmd.getName(),
+        "-" + S3GuardTool.BucketInfo.GUARDED_FLAG,
+        getFileSystem().getUri().toString());
+    LOG.info("Exec output=\n{}", output);
+  }
+
+  @Test
+  public void testSetCapacity() throws Throwable {
+    S3GuardTool cmd = new S3GuardTool.SetCapacity(getFileSystem().getConf());
+    cmd.setStore(getMetadataStore());
+    String output = exec(cmd, cmd.getName(),
+        "-" + READ_FLAG, "100",
+        "-" + WRITE_FLAG, "100",
+        getFileSystem().getUri().toString());
+    LOG.info("Exec output=\n{}", output);
+  }
+
+
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/47011d7d/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/TestS3GuardCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/TestS3GuardCLI.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/TestS3GuardCLI.java
new file mode 100644
index 0000000..43256b9
--- /dev/null
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/TestS3GuardCLI.java
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a.s3guard;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.s3a.S3ATestConstants;
+import org.apache.hadoop.test.LambdaTestUtils;
+import org.apache.hadoop.util.ExitUtil;
+
+import static org.apache.hadoop.fs.s3a.s3guard.S3GuardTool.*;
+
+/**
+ * Test the S3Guard CLI entry point.
+ */
+public class TestS3GuardCLI extends Assert {
+
+  /**
+   * Run an S3GuardTool command from a varargs list.
+   * @param args argument list
+   * @return the return code
+   * @throws Exception any exception
+   */
+  protected int run(String... args)
+      throws Exception {
+    Configuration conf = new Configuration(false);
+    return S3GuardTool.run(conf, args);
+  }
+
+  /**
+   * Run an S3GuardTool command from a varargs list, catch any raised
+   * ExitException, and verify that its status code matches the expected one.
+   * @param status expected status code of an exception
+   * @param args argument list
+   * @throws Exception any exception
+   */
+  protected void runToFailure(int status, String... args)
+      throws Exception {
+    ExitUtil.ExitException ex =
+        LambdaTestUtils.intercept(ExitUtil.ExitException.class,
+            () -> run(args));
+    if (ex.status != status) {
+      throw ex;
+    }
+  }
+
+  @Test
+  public void testInfoNoArgs() throws Throwable {
+    runToFailure(INVALID_ARGUMENT, BucketInfo.NAME);
+  }
+
+  @Test
+  public void testInfoWrongFilesystem() throws Throwable {
+    runToFailure(INVALID_ARGUMENT,
+        BucketInfo.NAME, "file://");
+  }
+
+  @Test
+  public void testNoCommand() throws Throwable {
+    runToFailure(E_USAGE);
+  }
+
+  @Test
+  public void testUnknownCommand() throws Throwable {
+    runToFailure(E_USAGE, "unknown");
+  }
+
+  @Test
+  public void testPruneNoArgs() throws Throwable {
+    runToFailure(INVALID_ARGUMENT, Prune.NAME);
+  }
+
+  @Test
+  public void testDiffNoArgs() throws Throwable {
+    runToFailure(INVALID_ARGUMENT, Diff.NAME);
+  }
+
+  @Test
+  public void testImportNoArgs() throws Throwable {
+    runToFailure(INVALID_ARGUMENT, Import.NAME);
+  }
+
+  @Test
+  public void testDestroyNoArgs() throws Throwable {
+    runToFailure(INVALID_ARGUMENT, Destroy.NAME);
+  }
+
+  @Test
+  public void testDestroyUnknownTableNoRegion() throws Throwable {
+    runToFailure(INVALID_ARGUMENT, Destroy.NAME,
+        "-meta", "dynamodb://ireland-team");
+  }
+
+  @Test
+  public void testInitBucketAndRegion() throws Throwable {
+    runToFailure(INVALID_ARGUMENT, Init.NAME,
+        "-meta", "dynamodb://ireland-team",
+        "-region", "eu-west-1",
+        S3ATestConstants.DEFAULT_CSVTEST_FILE
+    );
+  }
+
+}

