Return-Path: X-Original-To: archive-asf-public-internal@cust-asf2.ponee.io Delivered-To: archive-asf-public-internal@cust-asf2.ponee.io Received: from cust-asf.ponee.io (cust-asf.ponee.io [163.172.22.183]) by cust-asf2.ponee.io (Postfix) with ESMTP id 0454A200D62 for ; Sat, 2 Dec 2017 03:38:28 +0100 (CET) Received: by cust-asf.ponee.io (Postfix) id 02EEA160C1A; Sat, 2 Dec 2017 02:38:28 +0000 (UTC) Delivered-To: archive-asf-public@cust-asf.ponee.io Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by cust-asf.ponee.io (Postfix) with SMTP id 2A8AF160C18 for ; Sat, 2 Dec 2017 03:38:27 +0100 (CET) Received: (qmail 14422 invoked by uid 500); 2 Dec 2017 02:38:19 -0000 Mailing-List: contact common-commits-help@hadoop.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Delivered-To: mailing list common-commits@hadoop.apache.org Received: (qmail 12235 invoked by uid 99); 2 Dec 2017 02:38:17 -0000 Received: from git1-us-west.apache.org (HELO git1-us-west.apache.org) (140.211.11.23) by apache.org (qpsmtpd/0.29) with ESMTP; Sat, 02 Dec 2017 02:38:17 +0000 Received: by git1-us-west.apache.org (ASF Mail Server at git1-us-west.apache.org, from userid 33) id C4282F60DF; Sat, 2 Dec 2017 02:38:15 +0000 (UTC) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit From: virajith@apache.org To: common-commits@hadoop.apache.org Date: Sat, 02 Dec 2017 02:38:16 -0000 Message-Id: In-Reply-To: <9e0d6d1ded754e118ac08ad7dbad2fe9@git.apache.org> References: <9e0d6d1ded754e118ac08ad7dbad2fe9@git.apache.org> X-Mailer: ASF-Git Admin Mailer Subject: [03/50] [abbrv] hadoop git commit: HDFS-12594. snapshotDiff fails if the report exceeds the RPC response limit. 
Contributed by Shashikant Banerjee archived-at: Sat, 02 Dec 2017 02:38:28 -0000 http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1c7654e/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDiffReport.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDiffReport.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDiffReport.java index e0a7b5b..a4fb8ab 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDiffReport.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDiffReport.java @@ -90,6 +90,7 @@ public class TestSnapshotDiffReport { conf.setBoolean( DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_DIFF_ALLOW_SNAP_ROOT_DESCENDANT, true); + conf.setInt(DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_DIFF_LISTING_LIMIT, 3); cluster = new MiniDFSCluster.Builder(conf).numDataNodes(REPLICATION) .format(true).build(); cluster.waitActive(); @@ -1293,4 +1294,119 @@ public class TestSnapshotDiffReport { assertAtimeNotEquals(filePostSS, root, "s2", "s3"); } + + /** + * Tests to verify the diff report with the maximum SnapshotDiffReportEntries limit + * over an rpc being set to 3. 
+ * @throws Exception + */ + @Test + public void testDiffReportWithRpcLimit() throws Exception { + final Path root = new Path("/"); + hdfs.mkdirs(root); + for (int i = 1; i < 4; i++) { + final Path path = new Path(root, "dir" + i); + hdfs.mkdirs(path); + } + SnapshotTestHelper.createSnapshot(hdfs, root, "s0"); + for (int i = 1; i < 4; i++) { + final Path path = new Path(root, "dir" + i); + for (int j = 1; j < 4; j++) { + final Path file = new Path(path, "file" + j); + DFSTestUtil.createFile(hdfs, file, BLOCKSIZE, REPLICATION, SEED); + } + } + + SnapshotTestHelper.createSnapshot(hdfs, root, "s1"); + verifyDiffReport(root, "s0", "s1", + new DiffReportEntry(DiffType.MODIFY, DFSUtil.string2Bytes("")), + new DiffReportEntry(DiffType.MODIFY, DFSUtil.string2Bytes("dir1")), + new DiffReportEntry(DiffType.CREATE, + DFSUtil.string2Bytes("dir1/file1")), + new DiffReportEntry(DiffType.CREATE, + DFSUtil.string2Bytes("dir1/file2")), + new DiffReportEntry(DiffType.CREATE, + DFSUtil.string2Bytes("dir1/file3")), + new DiffReportEntry(DiffType.MODIFY, DFSUtil.string2Bytes("dir2")), + new DiffReportEntry(DiffType.CREATE, + DFSUtil.string2Bytes("dir2/file1")), + new DiffReportEntry(DiffType.CREATE, + DFSUtil.string2Bytes("dir2/file2")), + new DiffReportEntry(DiffType.CREATE, + DFSUtil.string2Bytes("dir2/file3")), + new DiffReportEntry(DiffType.MODIFY, DFSUtil.string2Bytes("dir3")), + new DiffReportEntry(DiffType.CREATE, + DFSUtil.string2Bytes("dir3/file1")), + new DiffReportEntry(DiffType.CREATE, + DFSUtil.string2Bytes("dir3/file2")), + new DiffReportEntry(DiffType.CREATE, + DFSUtil.string2Bytes("dir3/file3"))); + } + + @Test + public void testDiffReportWithRpcLimit2() throws Exception { + final Path root = new Path("/"); + hdfs.mkdirs(root); + for (int i = 1; i <=3; i++) { + final Path path = new Path(root, "dir" + i); + hdfs.mkdirs(path); + } + for (int i = 1; i <= 3; i++) { + final Path path = new Path(root, "dir" + i); + for (int j = 1; j < 4; j++) { + final Path file = new 
Path(path, "file" + j); + DFSTestUtil.createFile(hdfs, file, BLOCKSIZE, REPLICATION, SEED); + } + } + SnapshotTestHelper.createSnapshot(hdfs, root, "s0"); + Path targetDir = new Path(root, "dir4"); + //create directory dir4 + hdfs.mkdirs(targetDir); + //moves files from dir1 to dir4 + Path path = new Path(root, "dir1"); + for (int j = 1; j < 4; j++) { + final Path srcPath = new Path(path, "file" + j); + final Path targetPath = new Path(targetDir, "file" + j); + hdfs.rename(srcPath, targetPath); + } + targetDir = new Path(root, "dir3"); + //overwrite existing files in dir3 from files in dir1 + path = new Path(root, "dir2"); + for (int j = 1; j < 4; j++) { + final Path srcPath = new Path(path, "file" + j); + final Path targetPath = new Path(targetDir, "file" + j); + hdfs.rename(srcPath, targetPath, Rename.OVERWRITE); + } + final Path pathToRename = new Path(root, "dir2"); + //move dir2 inside dir3 + hdfs.rename(pathToRename, targetDir); + SnapshotTestHelper.createSnapshot(hdfs, root, "s1"); + verifyDiffReport(root, "s0", "s1", + new DiffReportEntry(DiffType.MODIFY, DFSUtil.string2Bytes("")), + new DiffReportEntry(DiffType.CREATE, + DFSUtil.string2Bytes("dir4")), + new DiffReportEntry(DiffType.RENAME, DFSUtil.string2Bytes("dir2"), + DFSUtil.string2Bytes("dir3/dir2")), + new DiffReportEntry(DiffType.MODIFY, DFSUtil.string2Bytes("dir1")), + new DiffReportEntry(DiffType.RENAME, DFSUtil.string2Bytes("dir1/file1"), + DFSUtil.string2Bytes("dir4/file1")), + new DiffReportEntry(DiffType.RENAME, DFSUtil.string2Bytes("dir1/file2"), + DFSUtil.string2Bytes("dir4/file2")), + new DiffReportEntry(DiffType.RENAME, DFSUtil.string2Bytes("dir1/file3"), + DFSUtil.string2Bytes("dir4/file3")), + new DiffReportEntry(DiffType.MODIFY, DFSUtil.string2Bytes("dir2")), + new DiffReportEntry(DiffType.RENAME, DFSUtil.string2Bytes("dir2/file1"), + DFSUtil.string2Bytes("dir3/file1")), + new DiffReportEntry(DiffType.RENAME, DFSUtil.string2Bytes("dir2/file2"), + DFSUtil.string2Bytes("dir3/file2")), + 
new DiffReportEntry(DiffType.RENAME, DFSUtil.string2Bytes("dir2/file3"), + DFSUtil.string2Bytes("dir3/file3")), + new DiffReportEntry(DiffType.MODIFY, DFSUtil.string2Bytes("dir3")), + new DiffReportEntry(DiffType.DELETE, + DFSUtil.string2Bytes("dir3/file1")), + new DiffReportEntry(DiffType.DELETE, + DFSUtil.string2Bytes("dir3/file1")), + new DiffReportEntry(DiffType.DELETE, + DFSUtil.string2Bytes("dir3/file3"))); + } } --------------------------------------------------------------------- To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org For additional commands, e-mail: common-commits-help@hadoop.apache.org