Subject: svn commit: r1490080 - in /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/server/namenode/ src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ src/test/java/org/apache/hadoop/hdfs/ src/test/java/...
Date: Wed, 05 Jun 2013 23:32:54 -0000
To: hdfs-commits@hadoop.apache.org
From: jing9@apache.org

Author: jing9
Date: Wed Jun 5 23:32:53 2013
New Revision: 1490080

URL: http://svn.apache.org/r1490080
Log:
HDFS-4850. Fix OfflineImageViewer to work on fsimages with empty files or
snapshots. Contributed by Jing Zhao.
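[Editor's note, not part of the commit message: the viewer is driven the same way the new TestSnapshot#testOfflineImageViewer below drives it: build a visitor, hand it to OfflineImageViewer, and call go(). A minimal standalone sketch, where the input and output paths are placeholders and the boolean arguments simply mirror the values used in that test:

    import org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer;
    import org.apache.hadoop.hdfs.tools.offlineImageViewer.XmlImageVisitor;

    public class OivXmlDump {
      public static void main(String[] args) throws Exception {
        String fsimage = args[0];  // path to a copy of a NameNode fsimage file
        String xmlOut  = args[1];  // where the XML rendering should be written
        // flags copied from the test below (assumed: visitor prints to screen,
        // viewer skips per-block details)
        XmlImageVisitor v = new XmlImageVisitor(xmlOut, true);
        OfflineImageViewer oiv = new OfflineImageViewer(fsimage, v, true);
        oiv.go();
      }
    }

Using XmlImageVisitor from outside its package only compiles because this commit widens the class from package-private to public, as shown in the XmlImageVisitor.java hunk below.]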
Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodesInPath.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageVisitor.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1490080&r1=1490079&r2=1490080&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Wed Jun 5 23:32:53 2013
@@ -1053,6 +1053,9 @@ Release 2.1.0-beta - UNRELEASED
     HDFS-4876. Fix the javadoc of FileWithSnapshot and move FileDiffList to
     FileWithSnapshot. (szetszwo)
 
+    HDFS-4850. Fix OfflineImageViewer to work on fsimages with empty files or
+    snapshots. (jing9)
+
 Release 2.0.5-alpha - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodesInPath.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodesInPath.java?rev=1490080&r1=1490079&r2=1490080&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodesInPath.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodesInPath.java Wed Jun 5 23:32:53 2013
@@ -186,7 +186,7 @@ public class INodesInPath {
       // check if the next byte[] in components is for ".snapshot"
       if (isDotSnapshotDir(childName)
-          && isDir && dir instanceof INodeDirectoryWithSnapshot) {
+          && isDir && dir instanceof INodeDirectorySnapshottable) {
         // skip the ".snapshot" in components
         count++;
         index++;

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java?rev=1490080&r1=1490079&r2=1490080&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java Wed Jun 5 23:32:53 2013
@@ -177,7 +177,11 @@ class ImageLoaderCurrent implements Imag
                                       imageVersion);
       if (supportSnapshot) {
         v.visit(ImageElement.SNAPSHOT_COUNTER, in.readInt());
-        v.visit(ImageElement.NUM_SNAPSHOTS_TOTAL, in.readInt());
+        int numSnapshots = in.readInt();
+        v.visit(ImageElement.NUM_SNAPSHOTS_TOTAL, numSnapshots);
+        for (int i = 0; i < numSnapshots; i++) {
+          processSnapshot(in, v);
+        }
       }
       if (LayoutVersion.supports(Feature.FSIMAGE_COMPRESSION,
           imageVersion)) {
@@ -335,8 +339,8 @@ class ImageLoaderCurrent implements Imag
     v.visitEnclosingElement(ImageElement.BLOCKS,
                             ImageElement.NUM_BLOCKS, numBlocks);
 
-    // directory or symlink, no blocks to process
-    if(numBlocks == -1 || numBlocks == -2) {
+    // directory or symlink or reference node, no blocks to process
+    if(numBlocks < 0) {
       v.leaveEnclosingElement(); // Blocks
       return;
     }
@@ -484,10 +488,6 @@ class ImageLoaderCurrent implements Imag
         // process snapshot
         v.visitEnclosingElement(ImageElement.SNAPSHOT);
         v.visit(ImageElement.SNAPSHOT_ID, in.readInt());
-        // process root of snapshot
-        v.visitEnclosingElement(ImageElement.SNAPSHOT_ROOT);
-        processINode(in, v, true, rootName, false);
-        v.leaveEnclosingElement();
         v.leaveEnclosingElement();
       }
       v.visit(ImageElement.SNAPSHOT_QUOTA, in.readInt());
@@ -495,6 +495,17 @@ class ImageLoaderCurrent implements Imag
     }
   }
 
+  private void processSnapshot(DataInputStream in, ImageVisitor v)
+      throws IOException {
+    v.visitEnclosingElement(ImageElement.SNAPSHOT);
+    v.visit(ImageElement.SNAPSHOT_ID, in.readInt());
+    // process root of snapshot
+    v.visitEnclosingElement(ImageElement.SNAPSHOT_ROOT);
+    processINode(in, v, true, "", false);
+    v.leaveEnclosingElement();
+    v.leaveEnclosingElement();
+  }
+
   private void processDirectoryDiffList(DataInputStream in, ImageVisitor v,
       String currentINodeName) throws IOException {
     final int numDirDiff = in.readInt();
@@ -512,8 +523,8 @@ class ImageLoaderCurrent implements Imag
   private void processDirectoryDiff(DataInputStream in, ImageVisitor v,
       String currentINodeName) throws IOException {
     v.visitEnclosingElement(ImageElement.SNAPSHOT_DIR_DIFF);
-    String snapshot = FSImageSerialization.readString(in);
-    v.visit(ImageElement.SNAPSHOT_DIFF_SNAPSHOTROOT, snapshot);
+    int snapshotId = in.readInt();
+    v.visit(ImageElement.SNAPSHOT_DIFF_SNAPSHOTID, snapshotId);
     v.visit(ImageElement.SNAPSHOT_DIR_DIFF_CHILDREN_SIZE, in.readInt());
 
     // process snapshotINode
@@ -617,7 +628,7 @@ class ImageLoaderCurrent implements Imag
 
     processBlocks(in, v, numBlocks, skipBlocks);
 
-    if (numBlocks > 0) { // File
+    if (numBlocks >= 0) { // File
       if (supportSnapshot) {
         // process file diffs
         processFileDiffList(in, v, parentName);
@@ -631,6 +642,7 @@
           }
         }
       }
+      processPermission(in, v);
     } else if (numBlocks == -1) { // Directory
       if (supportSnapshot && supportInodeId) {
         dirNodeMap.put(inodeId, pathName);
@@ -647,6 +659,7 @@
         v.visit(ImageElement.IS_SNAPSHOTTABLE_DIR, Boolean.toString(snapshottable));
         }
       }
+      processPermission(in, v);
     } else if (numBlocks == -2) {
       v.visit(ImageElement.SYMLINK, Text.readString(in));
     } else if (numBlocks == -3) { // reference node
@@ -668,7 +681,6 @@
       }
     }
 
-    processPermission(in, v);
     v.leaveEnclosingElement(); // INode
   }
 
@@ -678,18 +690,27 @@ class ImageLoaderCurrent implements Imag
     if (size >= 0) {
       v.visitEnclosingElement(ImageElement.SNAPSHOT_FILE_DIFFS,
           ImageElement.NUM_SNAPSHOT_FILE_DIFF, size);
-      String snapshot = FSImageSerialization.readString(in);
-      v.visit(ImageElement.SNAPSHOT_DIFF_SNAPSHOTROOT, snapshot);
-      v.visit(ImageElement.SNAPSHOT_FILE_SIZE, in.readLong());
-      if (in.readBoolean()) {
-        v.visitEnclosingElement(ImageElement.SNAPSHOT_DIFF_SNAPSHOTINODE);
-        processINode(in, v, true, currentINodeName, true);
-        v.leaveEnclosingElement();
+      for (int i = 0; i < size; i++) {
+        processFileDiff(in, v, currentINodeName);
       }
       v.leaveEnclosingElement();
     }
   }
 
+  private void processFileDiff(DataInputStream in, ImageVisitor v,
+      String currentINodeName) throws IOException {
+    int snapshotId = in.readInt();
+    v.visitEnclosingElement(ImageElement.SNAPSHOT_FILE_DIFF,
+        ImageElement.SNAPSHOT_DIFF_SNAPSHOTID, snapshotId);
+    v.visit(ImageElement.SNAPSHOT_FILE_SIZE, in.readLong());
+    if (in.readBoolean()) {
+      v.visitEnclosingElement(ImageElement.SNAPSHOT_DIFF_SNAPSHOTINODE);
+      processINode(in, v, true, currentINodeName, true);
+      v.leaveEnclosingElement();
+    }
+    v.leaveEnclosingElement();
+  }
+
   /**
    * Helper method to format dates during processing.
    * @param date Date as read from image file
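[Editor's note, not part of the diff: the ImageLoaderCurrent changes above hinge on how an INode record encodes its type in the block-count field. A non-negative count, including zero for an empty file, means a regular file; -1 means a directory, -2 a symlink, and -3 a reference node, all of which are visible in the hunks above. The old "numBlocks > 0" test therefore matched no branch for an empty file, and the old "== -1 || == -2" test missed reference nodes. A small illustrative sketch of that dispatch (illustration only, not the committed code):

    // Sketch of the sentinel convention the loader relies on.
    static String inodeKind(int numBlocks) {
      if (numBlocks >= 0) {
        return "file";        // zero blocks is a legal, empty file
      } else if (numBlocks == -1) {
        return "directory";
      } else if (numBlocks == -2) {
        return "symlink";
      } else if (numBlocks == -3) {
        return "reference";   // snapshot-related reference node
      }
      throw new IllegalArgumentException("unexpected block count: " + numBlocks);
    }
]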
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageVisitor.java?rev=1490080&r1=1490079&r2=1490080&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageVisitor.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageVisitor.java Wed Jun 5 23:32:53 2013
@@ -95,7 +95,7 @@ abstract class ImageVisitor {
     NUM_SNAPSHOT_DIR_DIFF,
     SNAPSHOT_DIR_DIFFS,
     SNAPSHOT_DIR_DIFF,
-    SNAPSHOT_DIFF_SNAPSHOTROOT,
+    SNAPSHOT_DIFF_SNAPSHOTID,
     SNAPSHOT_DIR_DIFF_CHILDREN_SIZE,
     SNAPSHOT_DIFF_SNAPSHOTINODE,
     SNAPSHOT_DIR_DIFF_CREATEDLIST,

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java?rev=1490080&r1=1490079&r2=1490080&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java Wed Jun 5 23:32:53 2013
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.tools.offlineImageViewer;
 
 import java.io.FileOutputStream;
-import java.io.FileWriter;
 import java.io.IOException;
 import java.io.OutputStreamWriter;

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java?rev=1490080&r1=1490079&r2=1490080&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java Wed Jun 5 23:32:53 2013
@@ -24,7 +24,7 @@ import java.util.LinkedList;
 
 /**
  * An XmlImageVisitor walks over an fsimage structure and writes out
  * an equivalent XML document that contains the fsimage's components.
  */
-class XmlImageVisitor extends TextWriterImageVisitor {
+public class XmlImageVisitor extends TextWriterImageVisitor {
   final private LinkedList tagQ = new LinkedList();

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java?rev=1490080&r1=1490079&r2=1490080&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java Wed Jun 5 23:32:53 2013
@@ -30,9 +30,11 @@ import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.FileReader;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.OutputStream;
 import java.net.HttpURLConnection;
 import java.net.InetSocketAddress;
 import java.net.Socket;
@@ -857,6 +859,25 @@ public class DFSTestUtil {
     return new DatanodeRegistration(getLocalDatanodeID(), new StorageInfo(),
         new ExportedBlockKeys(), VersionInfo.getVersion());
   }
+
+  /** Copy one file's contents into the other **/
+  public static void copyFile(File src, File dest) throws IOException {
+    InputStream in = null;
+    OutputStream out = null;
+
+    try {
+      in = new FileInputStream(src);
+      out = new FileOutputStream(dest);
+
+      byte [] b = new byte[1024];
+      while( in.read(b) > 0 ) {
+        out.write(b);
+      }
+    } finally {
+      if(in != null) in.close();
+      if(out != null) out.close();
+    }
+  }
 
   public static class Builder {
     private int maxLevels = 3;
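[Editor's note, not part of the diff: InputStream.read(byte[]) may return fewer bytes than the buffer holds, while write(b) always writes the whole array, so the copyFile helper above can append stale buffer bytes to the destination on the final iteration. A length-aware variant looks like this (a sketch only, not part of this commit):

    public static void copyFileExact(File src, File dest) throws IOException {
      InputStream in = null;
      OutputStream out = null;
      try {
        in = new FileInputStream(src);
        out = new FileOutputStream(dest);
        byte[] b = new byte[1024];
        int n;
        while ((n = in.read(b)) > 0) {
          out.write(b, 0, n);   // write only the bytes actually read
        }
      } finally {
        if (in != null) in.close();
        if (out != null) out.close();
      }
    }
]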
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java?rev=1490080&r1=1490079&r2=1490080&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java Wed Jun 5 23:32:53 2013
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.server.namenode.snapshot;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -44,10 +45,13 @@ import org.apache.hadoop.hdfs.MiniDFSClu
 import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
+import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.INode;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotTestHelper.TestDirectoryTree;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotTestHelper.TestDirectoryTree.Node;
+import org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer;
+import org.apache.hadoop.hdfs.tools.offlineImageViewer.XmlImageVisitor;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.log4j.Level;
@@ -68,7 +72,13 @@ public class TestSnapshot {
     SnapshotTestHelper.disableLogs();
   }
 
-  private static final long seed = Time.now();
+  private static final long seed;
+  private static final Random random;
+  static {
+    seed = Time.now();
+    random = new Random(seed);
+    System.out.println("Random seed: " + seed);
+  }
   protected static final short REPLICATION = 3;
   protected static final int BLOCKSIZE = 1024;
   /** The number of times snapshots are created for a snapshottable directory */
@@ -81,8 +91,6 @@ public class TestSnapshot {
   protected static FSNamesystem fsn;
   protected static FSDirectory fsdir;
   protected DistributedFileSystem hdfs;
-
-  private static Random random = new Random(seed);
   private static String testDir =
       System.getProperty("test.build.data", "build/test/data");
@@ -220,16 +228,57 @@ public class TestSnapshot {
   @Test
   public void testSnapshot() throws Throwable {
     try {
-      runTestSnapshot();
+      runTestSnapshot(SNAPSHOT_ITERATION_NUMBER);
     } catch(Throwable t) {
       SnapshotTestHelper.LOG.info("FAILED", t);
       SnapshotTestHelper.dumpTree("FAILED", cluster);
       throw t;
     }
   }
+
+  /**
+   * Test if the OfflineImageViewer can correctly parse a fsimage containing
+   * snapshots
+   */
+  @Test
+  public void testOfflineImageViewer() throws Throwable {
+    runTestSnapshot(SNAPSHOT_ITERATION_NUMBER);
+
+    // retrieve the fsimage. Note that we already save namespace to fsimage at
+    // the end of each iteration of runTestSnapshot.
+    File originalFsimage = FSImageTestUtil.findLatestImageFile(
+        FSImageTestUtil.getFSImage(
+        cluster.getNameNode()).getStorage().getStorageDir(0));
+    assertNotNull("Didn't generate or can't find fsimage", originalFsimage);
+
+    String ROOT = System.getProperty("test.build.data", "build/test/data");
+    File testFile = new File(ROOT, "/image");
+    String xmlImage = ROOT + "/image_xml";
+    boolean success = false;
+
+    try {
+      DFSTestUtil.copyFile(originalFsimage, testFile);
+      XmlImageVisitor v = new XmlImageVisitor(xmlImage, true);
+      OfflineImageViewer oiv = new OfflineImageViewer(testFile.getPath(), v,
+          true);
+      oiv.go();
+      success = true;
+    } finally {
+      if (testFile.exists()) {
+        testFile.delete();
+      }
+      // delete the xml file if the parsing is successful
+      if (success) {
+        File xmlImageFile = new File(xmlImage);
+        if (xmlImageFile.exists()) {
+          xmlImageFile.delete();
+        }
+      }
+    }
+  }
 
-  private void runTestSnapshot() throws Exception {
-    for (int i = 0; i < SNAPSHOT_ITERATION_NUMBER; i++) {
+  private void runTestSnapshot(int iteration) throws Exception {
+    for (int i = 0; i < iteration; i++) {
       // create snapshot and check the creation
       cluster.getNamesystem().getSnapshotManager().setAllowNestedSnapshots(true);
       TestDirectoryTree.Node[] ssNodes = createSnapshots();
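[Editor's note, not part of the diff: testOfflineImageViewer above assumes an fsimage already exists on disk because, as its comment says, runTestSnapshot saves the namespace at the end of each iteration. A hedged sketch of that save step, assuming the standard DistributedFileSystem safe-mode APIs rather than quoting the test helper's actual code:

    import java.io.IOException;
    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;

    // Sketch: checkpoint the namespace so a fresh fsimage file exists on disk.
    static void saveNamespace(DistributedFileSystem hdfs) throws IOException {
      hdfs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);  // saveNamespace requires safe mode
      hdfs.saveNamespace();
      hdfs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);
    }
]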
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java?rev=1490080&r1=1490079&r2=1490080&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java Wed Jun 5 23:32:53 2013
@@ -47,6 +47,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
@@ -173,7 +174,7 @@ public class TestOfflineImageViewer {
     File outputFile = new File(ROOT, "/basicCheckOutput");
 
     try {
-      copyFile(originalFsimage, testFile);
+      DFSTestUtil.copyFile(originalFsimage, testFile);
       ImageVisitor v = new LsImageVisitor(outputFile.getPath(), true);
       OfflineImageViewer oiv = new OfflineImageViewer(testFile.getPath(), v, false);
@@ -366,25 +367,6 @@ public class TestOfflineImageViewer {
       if(out != null) out.close();
     }
   }
-
-  // Copy one file's contents into the other
-  private void copyFile(File src, File dest) throws IOException {
-    InputStream in = null;
-    OutputStream out = null;
-
-    try {
-      in = new FileInputStream(src);
-      out = new FileOutputStream(dest);
-
-      byte [] b = new byte[1024];
-      while( in.read(b) > 0 ) {
-        out.write(b);
-      }
-    } finally {
-      if(in != null) in.close();
-      if(out != null) out.close();
-    }
-  }
 
   @Test
   public void outputOfFileDistributionVisitor() throws IOException {
@@ -394,7 +376,7 @@ public class TestOfflineImageViewer {
     int totalFiles = 0;
     BufferedReader reader = null;
     try {
-      copyFile(originalFsimage, testFile);
+      DFSTestUtil.copyFile(originalFsimage, testFile);
       ImageVisitor v = new FileDistributionVisitor(outputFile.getPath(), 0, 0);
       OfflineImageViewer oiv = new OfflineImageViewer(testFile.getPath(), v, false);
@@ -466,7 +448,7 @@ public class TestOfflineImageViewer {
     File testFile = new File(ROOT, "/basicCheck");
 
     try {
-      copyFile(originalFsimage, testFile);
+      DFSTestUtil.copyFile(originalFsimage, testFile);
       TestImageVisitor v = new TestImageVisitor();
       OfflineImageViewer oiv = new OfflineImageViewer(testFile.getPath(), v, true);
       oiv.go();