Posted to common-commits@hadoop.apache.org by cl...@apache.org on 2018/07/30 19:03:37 UTC

hadoop git commit: Revert "HADOOP-15637. LocalFs#listLocatedStatus does not filter out hidden .crc files. Contributed by Erik Krogen."

Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 72d908acd -> e95878632


Revert "HADOOP-15637. LocalFs#listLocatedStatus does not filter out hidden .crc files. Contributed by Erik Krogen."

This reverts commit 72d908acdb112722fc1f3fea773e71838982e196.
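
For context, the reverted change made ChecksumFs#listLocatedStatus skip the
hidden "." + <name> + ".crc" checksum files; with this revert, listings on
branch-2.8 surface those files again. Callers that want the filtered view can
skip them on their own side. A minimal, hypothetical sketch (the class name
CrcFilterExample and the "/tmp/data" path are illustrative only and not part
of this commit):

    import java.io.IOException;

    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.LocatedFileStatus;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.RemoteIterator;

    // Hypothetical caller-side filter; not part of this commit.
    public class CrcFilterExample {
      public static void main(String[] args) throws IOException {
        FileContext fc = FileContext.getLocalFSFileContext();
        // FileContext.Util#listFiles is built on listLocatedStatus, so after
        // this revert it may return the hidden checksum files as well.
        RemoteIterator<LocatedFileStatus> it =
            fc.util().listFiles(new Path("/tmp/data"), true);
        while (it.hasNext()) {
          LocatedFileStatus status = it.next();
          String name = status.getPath().getName();
          // ChecksumFs names its checksum files "." + <file name> + ".crc".
          if (name.startsWith(".") && name.endsWith(".crc")) {
            continue; // skip hidden checksum files
          }
          System.out.println(status.getPath());
        }
      }
    }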


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e9587863
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e9587863
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e9587863

Branch: refs/heads/branch-2.8
Commit: e95878632cc85e6ad67409a680828287a83b4b75
Parents: 72d908a
Author: Chen Liang <cl...@apache.org>
Authored: Mon Jul 30 12:03:29 2018 -0700
Committer: Chen Liang <cl...@apache.org>
Committed: Mon Jul 30 12:03:29 2018 -0700

----------------------------------------------------------------------
 .../java/org/apache/hadoop/fs/ChecksumFs.java   | 37 -------------------
 .../fs/FileContextMainOperationsBaseTest.java   | 38 --------------------
 2 files changed, 75 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e9587863/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
index 5c54554..384b32c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
@@ -26,7 +26,6 @@ import java.nio.channels.ClosedChannelException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
-import java.util.NoSuchElementException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -34,7 +33,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
 
@@ -528,39 +526,4 @@ public abstract class ChecksumFs extends FilterFs {
     }
     return results.toArray(new FileStatus[results.size()]);
   }
-
-  @Override
-  public RemoteIterator<LocatedFileStatus> listLocatedStatus(final Path f)
-      throws AccessControlException, FileNotFoundException,
-             UnresolvedLinkException, IOException {
-    final RemoteIterator<LocatedFileStatus> iter =
-        getMyFs().listLocatedStatus(f);
-    return new RemoteIterator<LocatedFileStatus>() {
-
-      private LocatedFileStatus next = null;
-
-      @Override
-      public boolean hasNext() throws IOException {
-        while (next == null && iter.hasNext()) {
-          LocatedFileStatus unfilteredNext = iter.next();
-          if (!isChecksumFile(unfilteredNext.getPath())) {
-            next = unfilteredNext;
-          }
-        }
-        return next != null;
-      }
-
-      @Override
-      public LocatedFileStatus next() throws IOException {
-        if (!hasNext()) {
-          throw new NoSuchElementException();
-        }
-        LocatedFileStatus tmp = next;
-        next = null;
-        return tmp;
-      }
-
-    };
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e9587863/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
index ab93333..c835076 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
@@ -368,44 +368,6 @@ public abstract class FileContextMainOperationsBaseTest  {
     pathsIterator = fc.listStatus(getTestRootPath(fc, "test/hadoop/a"));
     Assert.assertFalse(pathsIterator.hasNext());
   }
-
-  @Test
-  public void testListFiles() throws Exception {
-    Path[] testDirs = {
-        getTestRootPath(fc, "test/dir1"),
-        getTestRootPath(fc, "test/dir1/dir1"),
-        getTestRootPath(fc, "test/dir2")
-    };
-    Path[] testFiles = {
-        new Path(testDirs[0], "file1"),
-        new Path(testDirs[0], "file2"),
-        new Path(testDirs[1], "file2"),
-        new Path(testDirs[2], "file1")
-    };
-
-    for (Path path : testDirs) {
-      fc.mkdir(path, FsPermission.getDefault(), true);
-    }
-    for (Path p : testFiles) {
-      FSDataOutputStream out = fc.create(p).build();
-      out.writeByte(0);
-      out.close();
-    }
-
-    RemoteIterator<LocatedFileStatus> filesIterator =
-        fc.util().listFiles(getTestRootPath(fc, "test"), true);
-    LocatedFileStatus[] fileStats =
-        new LocatedFileStatus[testFiles.length];
-    for (int i = 0; i < fileStats.length; i++) {
-      assertTrue(filesIterator.hasNext());
-      fileStats[i] = filesIterator.next();
-    }
-    assertFalse(filesIterator.hasNext());
-
-    for (Path p : testFiles) {
-      assertTrue(containsPath(p, fileStats));
-    }
-  }
   
   @Test
   public void testListStatusFilterWithNoMatches() throws Exception {

