You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-issues@hadoop.apache.org by GitBox <gi...@apache.org> on 2022/02/21 14:41:00 UTC

[GitHub] [hadoop] dannycjones commented on a change in pull request #4006: HADOOP-13294. Test hadoop fs shell against s3a

dannycjones commented on a change in pull request #4006:
URL: https://github.com/apache/hadoop/pull/4006#discussion_r811183975



##########
File path: hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFsShell.java
##########
@@ -0,0 +1,590 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.Map;
+import java.util.TimeZone;
+import java.util.concurrent.ThreadLocalRandom;
+
+import org.junit.Test;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.FsStatus;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+
+import static org.apache.hadoop.fs.s3a.impl.HeaderProcessing.decodeBytes;
+import static org.apache.hadoop.test.GenericTestUtils.getTempPath;
+
+/**
+ * Test of hadoop fs shell against S3A.
+ */
+public class ITestS3AFsShell extends AbstractS3ATestBase {
+
+  // block size for generated test file in byte
+  private static final int BLOCK_SIZE = 1024;
+
+  // S3A filesystem under test, supplied by AbstractS3ATestBase
+  private FileSystem fs;
+  // local filesystem, used for staging files the shell copies to/from S3A
+  private LocalFileSystem lfs;
+  // shell instance driven by the same configuration as the filesystem
+  private FsShell fsShell;
+
+  /**
+   * Sets up the S3A filesystem under test, a local filesystem for
+   * staging files, and an {@code FsShell} driven by the same configuration.
+   */
+  @Override
+  public void setup() throws Exception {
+    super.setup();
+    Configuration conf = getConfiguration();
+    fs = getFileSystem();
+    lfs = FileSystem.getLocal(conf);
+    fsShell = new FsShell(conf);
+  }
+
+  /**
+   * Runs a single FsShell command.
+   *
+   * @param args the shell command and its arguments
+   * @return the shell exit code: 0 on success, non-zero on failure
+   */
+  private int shellRun(String... args) {
+    return fsShell.run(args);
+  }
+
+  /**
+   * Exercises the directory-oriented shell commands (mkdir, ls, rmdir, rm)
+   * against S3A, asserting on the shell exit code, the captured
+   * stdout/stderr text, and the resulting filesystem state.
+   */
+  @Test
+  public void testFsShellDirectoryOperations() throws IOException {
+    Path testDir = methodPath();
+
+    // capture shell output so assertions can inspect stdout/stderr text.
+    // NOTE(review): the original System.out/System.err are never restored;
+    // consider saving them and restoring in a finally block so later tests
+    // in the same JVM are not affected.
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    ByteArrayOutputStream err = new ByteArrayOutputStream();
+    System.setOut(new PrintStream(out));
+    System.setErr(new PrintStream(err));
+
+    assertEquals("Should create directory success", 0, shellRun("-mkdir", testDir.toString()));
+    assertTrue("Directory should exist", fs.getFileStatus(testDir).isDirectory());
+
+    assertEquals("Should recursively create directory success", 0,
+        shellRun("-mkdir", "-p", testDir + "/subdir1/subdir2"));
+    assertTrue("Directory should exist",
+        fs.getFileStatus(new Path(testDir, "subdir1/subdir2")).isDirectory());
+
+    // creating a new bucket with "hadoop fs -mkdir" fails with "File exists"
+    // because innerGetFileStatus returns the root directory status without a probe
+    String newBucketName =
+        "hadoop-fs-shell-test-" + ThreadLocalRandom.current().nextLong(Long.MAX_VALUE);
+    assertNotEquals("Should not be able to create new bucket", 0,
+        shellRun("-mkdir", "s3a://" + newBucketName + "/"));
+    assertTrue(err.toString().contains("mkdir: `s3a://" + newBucketName + "/': File exists"));
+    err.reset();
+
+    assertEquals("Should list directory success", 0, shellRun("-ls", testDir.toString()));
+    assertTrue("Should found one item", out.toString().contains("Found 1 items"));
+    assertTrue("Should print file list to stdout", out.toString().contains(testDir + "/subdir1"));
+    out.reset();
+
+    // -ls -d lists the directory entry itself rather than its contents
+    assertEquals("Should list directory as a plain file success", 0,
+        shellRun("-ls", "-d", testDir.toString()));
+    assertTrue("Should print directory path to stdout",
+        out.toString().contains(testDir.toString()));
+    out.reset();
+
+    // -ls -e needs erasure coding support, which S3A does not provide
+    assertNotEquals("Should fail when list with display erasure coding policy flag", 0,
+        shellRun("-ls", "-e", testDir.toString()));
+    assertTrue(err.toString()
+        .contains("FileSystem " + fs.getUri().toString() + " does not support Erasure Coding"));
+    err.reset();
+
+    assertEquals("Should recursively list directory success", 0,
+        shellRun("-ls", "-R", testDir.toString()));
+    assertTrue("Should print file list to stdout", out.toString().contains(testDir.toString()));
+    assertTrue("Should print file list to stdout", out.toString().contains(testDir + "/subdir1"));
+    assertTrue("Should print file list to stdout",
+        out.toString().contains(testDir + "/subdir1/subdir2"));
+    out.reset();
+
+    assertEquals("Should delete directory success", 0,
+        shellRun("-rmdir", testDir + "/subdir1/subdir2"));
+    assertFalse("Directory should not exist", fs.exists(new Path(testDir, "subdir1/subdir2")));
+
+    // -rmdir (unlike -rm -r) must refuse to remove a non-empty directory
+    assertNotEquals("Should not be able to delete non-empty directory", 0,
+        shellRun("-rmdir", testDir.toString()));
+    assertTrue(err.toString().contains("Directory is not empty"));
+    assertTrue("Directory should exist", fs.exists(testDir));
+    err.reset();
+
+    assertEquals("Should recursively delete directory success", 0,
+        shellRun("-rm", "-r", testDir.toString()));
+    assertFalse("Directory should not exist", fs.exists(testDir));
+
+    // deleting the bucket root must be rejected, even with -f
+    assertNotEquals("Should not be able to delete root directory", 0,
+        shellRun("-rm", "-r", "-f", fs.getUri().toString() + "/"));
+    assertTrue(err.toString().contains("Input/output error"));
+    err.reset();
+  }
+
+  @Test
+  public void testFsShellFileOperations() throws IOException {

Review comment:
       This method is too long, and raises a checkstyle violation.
   
   ```
   ./hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFsShell.java:141:  @Test:3: Method length is 155 lines (max allowed is 150). [MethodLength]
   ```
   
   Can we break it down? Maybe move the stream concatenation (cat, head, tail) to another method?

##########
File path: hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFsShell.java
##########
@@ -0,0 +1,590 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.Map;
+import java.util.TimeZone;
+import java.util.concurrent.ThreadLocalRandom;
+
+import org.junit.Test;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.FsStatus;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+
+import static org.apache.hadoop.fs.s3a.impl.HeaderProcessing.decodeBytes;
+import static org.apache.hadoop.test.GenericTestUtils.getTempPath;
+
+/**
+ * Test of hadoop fs shell against S3A.
+ */

Review comment:
       This adds a new checkstyle violation, can you push a fix?
   
   ```
   ./hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFsShell.java:45:/**: First sentence should end with a period. [JavadocStyle]
   ```
   
   https://ci-hadoop.apache.org/job/hadoop-multibranch/job/PR-4006/1/artifact/out/results-checkstyle-hadoop-tools_hadoop-aws.txt




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: common-issues-unsubscribe@hadoop.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: common-issues-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-issues-help@hadoop.apache.org