From: cliang@apache.org
To: common-commits@hadoop.apache.org
Message-Id: <310d68a70f9d4295871334e409bc18fe@git.apache.org>
Subject: hadoop git commit: HADOOP-15637. LocalFs#listLocatedStatus does not filter out hidden .crc files. Contributed by Erik Krogen.
Date: Mon, 30 Jul 2018 17:33:32 +0000 (UTC)

Repository: hadoop
Updated Branches:
  refs/heads/branch-3.0 a4fc0279f -> f60b50be3


HADOOP-15637. LocalFs#listLocatedStatus does not filter out hidden .crc files. Contributed by Erik Krogen.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f60b50be
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f60b50be
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f60b50be

Branch: refs/heads/branch-3.0
Commit: f60b50be3d0875962ca72e63fbfd14f7c2a10042
Parents: a4fc027
Author: Chen Liang
Authored: Mon Jul 30 10:25:07 2018 -0700
Committer: Chen Liang
Committed: Mon Jul 30 10:33:24 2018 -0700

----------------------------------------------------------------------
 .../java/org/apache/hadoop/fs/ChecksumFs.java   | 37 +++++++++++++++++++
 .../fs/FileContextMainOperationsBaseTest.java   | 38 ++++++++++++++++++++
 2 files changed, 75 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f60b50be/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
index 75622ad..c56f6e0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
@@ -27,10 +27,12 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
+import java.util.NoSuchElementException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
 import org.slf4j.Logger;
@@ -527,4 +529,39 @@ public abstract class ChecksumFs extends FilterFs {
     }
     return results.toArray(new FileStatus[results.size()]);
   }
+
+  @Override
+  public RemoteIterator<LocatedFileStatus> listLocatedStatus(final Path f)
+      throws AccessControlException, FileNotFoundException,
+      UnresolvedLinkException, IOException {
+    final RemoteIterator<LocatedFileStatus> iter =
+        getMyFs().listLocatedStatus(f);
+    return new RemoteIterator<LocatedFileStatus>() {
+
+      private LocatedFileStatus next = null;
+
+      @Override
+      public boolean hasNext() throws IOException {
+        while (next == null && iter.hasNext()) {
+          LocatedFileStatus unfilteredNext = iter.next();
+          if (!isChecksumFile(unfilteredNext.getPath())) {
+            next = unfilteredNext;
+          }
+        }
+        return next != null;
+      }
+
+      @Override
+      public LocatedFileStatus next() throws IOException {
+        if (!hasNext()) {
+          throw new NoSuchElementException();
+        }
+        LocatedFileStatus tmp = next;
+        next = null;
+        return tmp;
+      }
+
+    };
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f60b50be/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
index 35ec4ff..d090f02 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
@@ -369,6 +369,44 @@ public abstract class FileContextMainOperationsBaseTest {
     pathsIterator = fc.listStatus(getTestRootPath(fc, "test/hadoop/a"));
     Assert.assertFalse(pathsIterator.hasNext());
   }
+
+  @Test
+  public void testListFiles() throws Exception {
+    Path[] testDirs = {
+        getTestRootPath(fc, "test/dir1"),
+        getTestRootPath(fc, "test/dir1/dir1"),
+        getTestRootPath(fc, "test/dir2")
+    };
+    Path[] testFiles = {
+        new Path(testDirs[0], "file1"),
+        new Path(testDirs[0], "file2"),
+        new Path(testDirs[1], "file2"),
+        new Path(testDirs[2], "file1")
+    };
+
+    for (Path path : testDirs) {
+      fc.mkdir(path, FsPermission.getDefault(), true);
+    }
+    for (Path p : testFiles) {
+      FSDataOutputStream out = fc.create(p).build();
+      out.writeByte(0);
+      out.close();
+    }
+
+    RemoteIterator<LocatedFileStatus> filesIterator =
+        fc.util().listFiles(getTestRootPath(fc, "test"), true);
+    LocatedFileStatus[] fileStats =
+        new LocatedFileStatus[testFiles.length];
+    for (int i = 0; i < fileStats.length; i++) {
+      assertTrue(filesIterator.hasNext());
+      fileStats[i] = filesIterator.next();
+    }
+    assertFalse(filesIterator.hasNext());
+
+    for (Path p : testFiles) {
+      assertTrue(containsPath(p, fileStats));
+    }
+  }
 
   @Test
   public void testListStatusFilterWithNoMatches() throws Exception {
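----------------------------------------------------------------------

The practical effect of the change is easiest to see against the local file system: LocalFs extends ChecksumFs, so every file written through it gets a hidden "." + name + ".crc" checksum sibling, and before this fix those entries could surface from listLocatedStatus. The sketch below is illustrative only and not part of the commit above; the class name and temporary directory path are made up, and it uses only public FileContext APIs (getLocalFSFileContext, create with CreateFlag, listLocatedStatus).

import java.util.EnumSet;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.permission.FsPermission;

/**
 * Illustrative sketch (not part of the commit): writes one file through
 * LocalFs, which also creates a hidden ".file.crc" checksum sibling, then
 * verifies that listLocatedStatus never returns the checksum file.
 */
public class ListLocatedStatusCrcCheck {  // hypothetical class name
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getLocalFSFileContext(new Configuration());
    Path dir = new Path("/tmp/crc-filter-demo");  // hypothetical directory
    fc.mkdir(dir, FsPermission.getDefault(), true);

    // Writing "data" through LocalFs also produces a hidden ".data.crc".
    FSDataOutputStream out = fc.create(new Path(dir, "data"),
        EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE));
    out.writeByte(0);
    out.close();

    // With the fix, only "data" should come back, never ".data.crc".
    RemoteIterator<LocatedFileStatus> it = fc.listLocatedStatus(dir);
    while (it.hasNext()) {
      Path p = it.next().getPath();
      if (p.getName().endsWith(".crc")) {
        throw new IllegalStateException("checksum file leaked: " + p);
      }
      System.out.println(p);
    }

    fc.delete(dir, true);  // clean up the temporary directory
  }
}

The fix itself keeps the filtering lazy: hasNext() pulls entries from the underlying iterator and skips checksum files as they stream by, so no extra directory listing pass is required.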