From common-commits-return-79292-archive-asf-public=cust-asf.ponee.io@hadoop.apache.org Fri Mar 2 06:47:57 2018
Mailing-List: contact common-commits-help@hadoop.apache.org; run by ezmlm
Precedence: bulk
Delivered-To: mailing list common-commits@hadoop.apache.org
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
From: rohithsharmaks@apache.org
To: common-commits@hadoop.apache.org
Date: Fri, 02 Mar 2018 05:48:27 -0000
In-Reply-To: <4b827a86b9d745a799ee6a1361f16456@git.apache.org>
References: <4b827a86b9d745a799ee6a1361f16456@git.apache.org>
X-Mailer: ASF-Git Admin Mailer
Subject: [49/50] [abbrv] hadoop git commit: HDFS-13211. Fix a bug in DirectoryDiffList.getMinListForRange. Contributed by Shashikant Banerjee

HDFS-13211. Fix a bug in DirectoryDiffList.getMinListForRange. Contributed by Shashikant Banerjee

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d9900d4a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d9900d4a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d9900d4a

Branch: refs/heads/YARN-7055
Commit: d9900d4a0c880fe4abbaf7175800e4c708fec745
Parents: 628e246
Author: Tsz-Wo Nicholas Sze
Authored: Thu Mar 1 14:12:15 2018 -0800
Committer: Rohith Sharma K S
Committed: Fri Mar 2 11:08:29 2018 +0530

----------------------------------------------------------------------
 .../namenode/snapshot/DirectoryDiffList.java    |  37 +++-
 .../apache/hadoop/hdfs/util/ReadOnlyList.java   |  18 ++
 .../snapshot/TestDirectoryDiffList.java         | 182 +++++++++++--------
 3 files changed, 156 insertions(+), 81 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d9900d4a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryDiffList.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryDiffList.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryDiffList.java
index 90b01e5..d2b20d3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryDiffList.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryDiffList.java
@@ -97,7 +97,13 @@ public class DirectoryDiffList implements DiffList {
     public void setDiff(ChildrenDiff diff) {
       this.diff = diff;
     }
+
+    @Override
+    public String toString() {
+      return "->" + skipTo;
+    }
   }
+
   /**
    * SkipListNode is an implementation of a DirectoryDiff List node,
    * which stores a Directory Diff and references to subsequent nodes.
@@ -191,6 +197,11 @@ public class DirectoryDiffList implements DiffList {
         return skipDiffList.get(level).getSkipTo();
       }
     }
+
+    @Override
+    public String toString() {
+      return diff != null ? "" + diff.getSnapshotId() : "?";
+    }
   }
 
   /**
@@ -225,13 +236,6 @@ public class DirectoryDiffList implements DiffList {
     this.skipInterval = interval;
   }
 
-  public static DirectoryDiffList createSkipList(int capacity, int interval,
-      int skipLevel) {
-    DirectoryDiffList list =
-        new DirectoryDiffList(capacity, interval, skipLevel);
-    return list;
-  }
-
   /**
    * Adds the specified data element to the beginning of the SkipList,
    * if the element is not already present.
@@ -444,15 +448,15 @@ public class DirectoryDiffList implements DiffList {
    * order to generate all the changes occurred between fromIndex and
    * toIndex.
    *
-   * @param fromIndex index from where the summation has to start
-   * @param toIndex index till where the summation has to end
+   * @param fromIndex index from where the summation has to start(inclusive)
+   * @param toIndex index till where the summation has to end(exclusive)
    * @return list of Directory Diff
    */
   @Override
   public List getMinListForRange(int fromIndex, int toIndex,
       INodeDirectory dir) {
     final List subList = new ArrayList<>();
-    final int toSnapshotId = get(toIndex).getSnapshotId();
+    final int toSnapshotId = get(toIndex - 1).getSnapshotId();
     for (SkipListNode current = getNode(fromIndex); current != null;) {
       SkipListNode next = null;
       ChildrenDiff childrenDiff = null;
@@ -467,8 +471,21 @@ public class DirectoryDiffList implements DiffList {
       subList.add(childrenDiff == null ? curDiff
           : new DirectoryDiff(curDiff.getSnapshotId(), dir,
               childrenDiff));
+      if (current.getDiff().compareTo(toSnapshotId) == 0) {
+        break;
+      }
       current = next;
     }
     return subList;
   }
+
+  @Override
+  public String toString() {
+    final StringBuilder b = new StringBuilder(getClass().getSimpleName());
+    b.append("\nhead: ").append(head).append(head.skipDiffList);
+    for (SkipListNode n : skipNodeList) {
+      b.append("\n  ").append(n).append(n.skipDiffList);
+    }
+    return b.toString();
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d9900d4a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/ReadOnlyList.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/ReadOnlyList.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/ReadOnlyList.java
index 6da7dba..1d76f0e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/ReadOnlyList.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/ReadOnlyList.java
@@ -106,6 +106,11 @@ public interface ReadOnlyList extends Iterable {
       public E get(int i) {
         return list.get(i);
       }
+
+      @Override
+      public String toString() {
+        return list.toString();
+      }
     };
   }
 
@@ -234,6 +239,19 @@ public interface ReadOnlyList extends Iterable {
       public T[] toArray(T[] a) {
         throw new UnsupportedOperationException();
       }
+
+      @Override
+      public String toString() {
+        if (list.isEmpty()) {
+          return "[]";
+        }
+        final Iterator i = list.iterator();
+        final StringBuilder b = new StringBuilder("[").append(i.next());
+        for(; i.hasNext();) {
+          b.append(", ").append(i.next());
+        }
+        return b + "]";
+      }
     };
   }
 }
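
----------------------------------------------------------------------
Note on the change above: getMinListForRange treats fromIndex as inclusive
and toIndex as exclusive, so the terminating snapshot id has to be read from
get(toIndex - 1), and the skip-list walk has to stop once the node carrying
that id has been added to the result. The following stand-alone Java sketch
illustrates that contract with plain integer ids; the class and method names
are illustrative only and are not the HDFS types.

import java.util.ArrayList;
import java.util.List;

public class RangeSketch {
  // Returns the ids in the half-open range [fromIndex, toIndex).
  static List<Integer> minListForRange(List<Integer> snapshotIds,
      int fromIndex, int toIndex) {
    // Before the fix, the equivalent lookup used get(toIndex), one element too far.
    final int toSnapshotId = snapshotIds.get(toIndex - 1);
    final List<Integer> subList = new ArrayList<>();
    for (int i = fromIndex; i < snapshotIds.size(); i++) {
      subList.add(snapshotIds.get(i));
      if (snapshotIds.get(i) == toSnapshotId) {
        break; // mirrors the added compareTo(toSnapshotId) == 0 check
      }
    }
    return subList;
  }

  public static void main(String[] args) {
    final List<Integer> ids = List.of(10, 20, 30, 40);
    // Prints [20, 30]: index 1 inclusive through index 3 exclusive.
    System.out.println(minListForRange(ids, 1, 3));
  }
}
----------------------------------------------------------------------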
http://git-wip-us.apache.org/repos/asf/hadoop/blob/d9900d4a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestDirectoryDiffList.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestDirectoryDiffList.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestDirectoryDiffList.java
index fb6e7f4..99b6da2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestDirectoryDiffList.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestDirectoryDiffList.java
@@ -19,14 +19,13 @@ package org.apache.hadoop.hdfs.server.namenode.snapshot;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.INode;
 import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
+import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.ChildrenDiff;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff;
 import org.apache.hadoop.hdfs.util.ReadOnlyList;
 import org.junit.After;
@@ -34,8 +33,8 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.util.Collections;
 import java.util.List;
-import java.util.Iterator;
 
 /**
  * This class tests the DirectoryDiffList API's.
@@ -45,11 +44,6 @@ public class TestDirectoryDiffList{
     SnapshotTestHelper.disableLogs();
   }
 
-  private static final long SEED = 0;
-  private static final short REPL = 3;
-  private static final short REPL_2 = 1;
-  private static final long BLOCKSIZE = 1024;
-
   private static final Configuration CONF = new Configuration();
   private static MiniDFSCluster cluster;
   private static FSNamesystem fsn;
@@ -58,9 +52,8 @@ public class TestDirectoryDiffList{
 
   @Before
   public void setUp() throws Exception {
-    CONF.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCKSIZE);
-    cluster = new MiniDFSCluster.Builder(CONF).numDataNodes(REPL).format(true)
-        .build();
+    cluster =
+        new MiniDFSCluster.Builder(CONF).numDataNodes(0).format(true).build();
     cluster.waitActive();
     fsn = cluster.getNamesystem();
     fsdir = fsn.getFSDirectory();
@@ -75,88 +68,135 @@ public class TestDirectoryDiffList{
     }
   }
 
-  private void compareChildrenList(ReadOnlyList list1,
-      ReadOnlyList list2) {
-    Assert.assertEquals(list1.size(), list2.size());
-    for (int index = 0; index < list1.size(); index++) {
-      Assert.assertEquals(list1.get(index), list2.get(index));
+  static void assertList(List expected, List computed) {
+    Assert.assertEquals(expected.size(), computed.size());
+    for (int index = 0; index < expected.size(); index++) {
+      Assert.assertEquals(expected.get(index), computed.get(index));
+    }
+  }
+
+  static void verifyChildrenList(DirectoryDiffList skip, INodeDirectory dir) {
+    final int n = skip.size();
+    for (int i = 0; i < skip.size(); i++) {
+      final List expected =
+          ReadOnlyList.Util.asList(dir.getChildrenList(i));
+      final List computed = getChildrenList(skip, i, n, dir);
+      try {
+        assertList(expected, computed);
+      } catch (AssertionError ae) {
+        throw new AssertionError(
+            "i = " + i + "\ncomputed = " + computed + "\nexpected = "
+                + expected, ae);
+      }
     }
   }
 
-  private void verifyChildrenListForAllSnapshots(DirectoryDiffList list,
-      INodeDirectory dir) {
-    for (int index = 0;
-         index < list.size(); index++) {
-      ReadOnlyList list1 = dir.getChildrenList(index);
-      List subList =
-          list.getMinListForRange(index, list.size() - 1, dir);
-      ReadOnlyList list2 = getChildrenList(dir, subList);
-      compareChildrenList(list1, list2);
+  static void verifyChildrenList(
+      DiffList array, DirectoryDiffList skip,
+      INodeDirectory dir, List childrenList) {
+    final int n = array.size();
+    Assert.assertEquals(n, skip.size());
+    for (int i = 0; i < n - 1; i++) {
+      for (int j = i + 1; j < n - 1; j++) {
+        final List expected = getCombined(array, i, j, dir)
+            .apply2Previous(childrenList);
+        final List computed = getCombined(skip, i, j, dir)
+            .apply2Previous(childrenList);
+        try {
+          assertList(expected, computed);
+        } catch (AssertionError ae) {
+          throw new AssertionError(
+              "i = " + i + ", j = " + j + "\ncomputed = " + computed
+                  + "\nexpected = " + expected + "\n" + skip, ae);
+        }
+      }
     }
   }
 
-  private ReadOnlyList getChildrenList(INodeDirectory currentINode,
-      List list) {
-    List children = null;
-    final DirectoryWithSnapshotFeature.ChildrenDiff
-        combined = new DirectoryWithSnapshotFeature.ChildrenDiff();
-    for (DirectoryWithSnapshotFeature.DirectoryDiff d : list) {
+  private static ChildrenDiff getCombined(
+      DiffList list, int from, int to, INodeDirectory dir) {
+    final List minList = list.getMinListForRange(from, to, dir);
+    final ChildrenDiff combined = new ChildrenDiff();
+    for (DirectoryDiff d : minList) {
       combined.combinePosterior(d.getChildrenDiff(), null);
     }
-    children = combined.apply2Current(ReadOnlyList.Util.asList(
-        currentINode.getChildrenList(Snapshot.CURRENT_STATE_ID)));
-    return ReadOnlyList.Util.asReadOnlyList(children);
+    return combined;
+  }
+
+  static List getChildrenList(
+      DiffList list, int from, int to, INodeDirectory dir) {
+    final ChildrenDiff combined = getCombined(list, from, to, dir);
+    return combined.apply2Current(ReadOnlyList.Util.asList(dir.getChildrenList(
+        Snapshot.CURRENT_STATE_ID)));
+  }
+
+  static Path getChildPath(Path parent, int i) {
+    return new Path(parent, "c" + i);
   }
 
   @Test
-  public void testDirectoryDiffList() throws Exception {
-    final Path sdir1 = new Path("/dir1");
-    hdfs.mkdirs(sdir1);
-    SnapshotTestHelper.createSnapshot(hdfs, sdir1, "s0");
-    for (int i = 1; i < 31; i++) {
-      final Path file = new Path(sdir1, "file" + i);
-      DFSTestUtil.createFile(hdfs, file, BLOCKSIZE, REPL_2, SEED);
-      SnapshotTestHelper.createSnapshot(hdfs, sdir1, "s" + i);
+  public void testAddLast() throws Exception {
+    testAddLast(7);
+  }
+
+  static void testAddLast(int n) throws Exception {
+    final Path root = new Path("/testAddLast" + n);
+    hdfs.mkdirs(root);
+    SnapshotTestHelper.createSnapshot(hdfs, root, "s0");
+    for (int i = 1; i < n; i++) {
+      final Path child = getChildPath(root, i);
+      hdfs.mkdirs(child);
+      SnapshotTestHelper.createSnapshot(hdfs, root, "s" + i);
     }
-    INodeDirectory sdir1Node = fsdir.getINode(sdir1.toString()).asDirectory();
-    DiffList diffs =
-        sdir1Node.getDiffs().asList();
-
-    Iterator itr = diffs.iterator();
-    DirectoryDiffList skipList = DirectoryDiffList.createSkipList(0, 3, 5);
-    while (itr.hasNext()) {
-      skipList.addLast(itr.next());
+    INodeDirectory dir = fsdir.getINode(root.toString()).asDirectory();
+    DiffList diffs = dir.getDiffs().asList();
+
+    final DirectoryDiffList skipList = new DirectoryDiffList(0, 3, 5);
+    final DiffList arrayList = new DiffListByArrayList<>(0);
+    for (DirectoryDiff d : diffs) {
+      skipList.addLast(d);
+      arrayList.addLast(d);
     }
+
     // verify that the both the children list obtained from hdfs and
     // DirectoryDiffList are same
-    verifyChildrenListForAllSnapshots(skipList, sdir1Node);
+    verifyChildrenList(skipList, dir);
+    verifyChildrenList(arrayList, skipList, dir, Collections.emptyList());
   }
 
+
   @Test
-  public void testDirectoryDiffList2() throws Exception {
-    final Path sdir1 = new Path("/dir1");
-    hdfs.mkdirs(sdir1);
-    for (int i = 1; i < 31; i++) {
-      final Path file = new Path(sdir1, "file" + i);
-      DFSTestUtil.createFile(hdfs, file, BLOCKSIZE, REPL_2, SEED);
+  public void testAddFirst() throws Exception {
+    testAddFirst(7);
+  }
+
+  static void testAddFirst(int n) throws Exception {
+    final Path root = new Path("/testAddFirst" + n);
+    hdfs.mkdirs(root);
+    for (int i = 1; i < n; i++) {
+      final Path child = getChildPath(root, i);
+      hdfs.mkdirs(child);
    }
-    SnapshotTestHelper.createSnapshot(hdfs, sdir1, "s0");
-    for (int i = 1; i < 31; i++) {
-      final Path file = new Path(sdir1, "file" + (31 - i));
-      hdfs.delete(file, false);
-      SnapshotTestHelper.createSnapshot(hdfs, sdir1, "s" + i);
+    INodeDirectory dir = fsdir.getINode(root.toString()).asDirectory();
+    SnapshotTestHelper.createSnapshot(hdfs, root, "s0");
+    for (int i = 1; i < n; i++) {
+      final Path child = getChildPath(root, n - i);
+      hdfs.delete(child, false);
+      SnapshotTestHelper.createSnapshot(hdfs, root, "s" + i);
    }
-    INodeDirectory sdir1Node = fsdir.getINode(sdir1.toString()).asDirectory();
-    DiffList diffs = sdir1Node.getDiffs().asList();
-
-    int index = diffs.size() - 1;
-    DirectoryDiffList skipList = DirectoryDiffList.createSkipList(0, 3, 5);
-    while (index >= 0) {
-      skipList.addFirst(diffs.get(index));
-      index--;
+    DiffList diffs = dir.getDiffs().asList();
+    List childrenList = ReadOnlyList.Util.asList(dir.getChildrenList(
+        diffs.get(0).getSnapshotId()));
+    final DirectoryDiffList skipList = new DirectoryDiffList(0, 3, 5);
+    final DiffList arrayList = new DiffListByArrayList<>(0);
+    for (int i = diffs.size() - 1; i >= 0; i--) {
+      final DirectoryDiff d = diffs.get(i);
+      skipList.addFirst(d);
+      arrayList.addFirst(d);
    }
     // verify that the both the children list obtained from hdfs and
     // DirectoryDiffList are same
-    verifyChildrenListForAllSnapshots(skipList, sdir1Node);
+    verifyChildrenList(skipList, dir);
+    verifyChildrenList(arrayList, skipList, dir, childrenList);
   }
 }
\ No newline at end of file
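
----------------------------------------------------------------------
For context on the rewritten test: it no longer checks the skip list against
HDFS alone; it also cross-checks every range [i, j) computed through the
skip-structured DirectoryDiffList against the same range computed through a
plain DiffListByArrayList. The stand-alone sketch below shows that
cross-check pattern in simplified form: it compares raw ranges directly
rather than applying combined ChildrenDiffs to a children list, and all
names are illustrative rather than the HDFS test code.

import java.util.List;
import java.util.function.BiFunction;

public class RangeCrossCheck {
  // Compares candidate.apply(i, j) against reference.apply(i, j) for every
  // half-open range [i, j) over n elements.
  static void verifyAllRanges(int n,
      BiFunction<Integer, Integer, List<Integer>> reference,
      BiFunction<Integer, Integer, List<Integer>> candidate) {
    for (int i = 0; i < n - 1; i++) {
      for (int j = i + 1; j < n; j++) {
        final List<Integer> expected = reference.apply(i, j);
        final List<Integer> computed = candidate.apply(i, j);
        if (!expected.equals(computed)) {
          throw new AssertionError("i = " + i + ", j = " + j
              + "\ncomputed = " + computed + "\nexpected = " + expected);
        }
      }
    }
  }

  public static void main(String[] args) {
    final List<Integer> ids = List.of(10, 20, 30, 40);
    // Both sides use the same trivial sublist here; in the real test the
    // candidate is the skip list and the reference is the array-backed list.
    verifyAllRanges(ids.size(), ids::subList, ids::subList);
    System.out.println("all ranges match");
  }
}
----------------------------------------------------------------------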