Posted to common-issues@hadoop.apache.org by GitBox <gi...@apache.org> on 2020/03/16 20:46:17 UTC

[GitHub] [hadoop] steveloughran commented on a change in pull request #1893: HADOOP-16920 ABFS: Make list page size configurable

steveloughran commented on a change in pull request #1893: HADOOP-16920 ABFS: Make list page size configurable
URL: https://github.com/apache/hadoop/pull/1893#discussion_r393298569
 
 

 ##########
 File path: hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsClient.java
 ##########
 @@ -75,4 +89,80 @@ public void testUnknownHost() throws Exception {
             "UnknownHostException: " + fakeAccountName,
             () -> FileSystem.get(conf.getRawConfiguration()));
   }
+
+  @Test
+  public void testListPathWithValidListMaxResultsValues()
+      throws IOException, ExecutionException, InterruptedException {
+    final int fileCount = 10;
+    final String directory = "testWithValidListMaxResultsValues";
+    createDirectoryWithNFiles(directory, fileCount);
+    final int[] testData = {fileCount + 100, fileCount + 1, fileCount,
+        fileCount - 1, 1};
+    for (int i = 0; i < testData.length; i++) {
+      int listMaxResults = testData[i];
+      setListMaxResults(listMaxResults);
+      int expectedListResultsSize =
+          listMaxResults > fileCount ? fileCount : listMaxResults;
+      assertThat(listPath(directory).size(),
+          is(equalTo(expectedListResultsSize)));
+    }
+  }
+
+  @Test
+  public void testListPathWithValueGreaterThanServerMaximum()
+      throws IOException, ExecutionException, InterruptedException {
+    setListMaxResults(LIST_MAX_RESULTS_SERVER + 100);
+    final String directory = "testWithValueGreaterThanServerMaximum";
+    createDirectoryWithNFiles(directory, LIST_MAX_RESULTS_SERVER + 200);
+    assertThat(listPath(directory).size(),
+        is(equalTo(LIST_MAX_RESULTS_SERVER)));
+  }
+
+  @Test
+  public void testListPathWithInvalidListMaxResultsValues() throws Exception {
+    for (int i = -1; i < 1; i++) {
+      setListMaxResults(i);
+      intercept(AbfsRestOperationException.class, "Operation failed: \"One of "
+          + "the query parameters specified in the request URI is outside" + " "
+          + "the permissible range.", () -> listPath("directory"));
+    }
+  }
+
+  private List<ListResultEntrySchema> listPath(String directory)
+      throws IOException {
+    return getFileSystem().getAbfsClient()
+        .listPath(directory, false, getListMaxResults(), null).getResult()
+        .getListResultSchema().paths();
+  }
+
+  private int getListMaxResults() throws IOException {
+    return getFileSystem().getAbfsStore().getAbfsConfiguration()
+        .getListMaxResults();
+  }
+
+  private void setListMaxResults(int listMaxResults) throws IOException {
+    getFileSystem().getAbfsStore().getAbfsConfiguration()
+        .setListMaxResults(listMaxResults);
+  }
+
+  private void createDirectoryWithNFiles(String directory, int n)
+      throws ExecutionException, InterruptedException {
+    final List<Future<Void>> tasks = new ArrayList<>();
+    ExecutorService es = Executors.newFixedThreadPool(10);
+    for (int i = 0; i < n; i++) {
+      final Path fileName = new Path("/" + directory + "/test" + i);
+      Callable<Void> callable = new Callable<Void>() {
 
 Review comment:
   that's just a lambda expression; you should be able to go es.submit(() -> { touch(fileName); return null; })
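
   For reference, a minimal sketch of what that suggestion could look like inside createDirectoryWithNFiles (assuming touch(Path) is the existing test helper that creates an empty file, and that es and tasks are the executor and future list declared earlier in the method; an illustration of the review comment, not the committed change):

       for (int i = 0; i < n; i++) {
         final Path fileName = new Path("/" + directory + "/test" + i);
         // Submit a lambda instead of an anonymous Callable<Void>.
         // Callable allows the checked exception from touch(); returning
         // null satisfies the Void type expected by tasks.
         tasks.add(es.submit(() -> {
           touch(fileName);
           return null;
         }));
       }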

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: common-issues-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-issues-help@hadoop.apache.org