hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From zjs...@apache.org
Subject [13/50] [abbrv] hadoop git commit: HDFS-8607. TestFileCorruption doesn't work as expected. (Contributed by Walter Su)
Date Thu, 18 Jun 2015 18:24:51 GMT
HDFS-8607. TestFileCorruption doesn't work as expected. (Contributed by Walter Su)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e447ae3f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e447ae3f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e447ae3f

Branch: refs/heads/YARN-2928
Commit: e447ae3f2923a7c4cb82672553b3cf6384601bac
Parents: 30d1fb0
Author: Arpit Agarwal <arp@apache.org>
Authored: Mon Jun 15 10:11:53 2015 -0700
Committer: Zhijie Shen <zjshen@apache.org>
Committed: Thu Jun 18 11:10:05 2015 -0700

----------------------------------------------------------------------
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt      |  3 +++
 .../apache/hadoop/hdfs/TestFileCorruption.java   | 19 +++++++++++--------
 2 files changed, 14 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e447ae3f/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index cb2679a..c98d918 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -889,6 +889,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-8593. Calculation of effective layout version mishandles comparison to
     current layout version in storage. (cnauroth)
 
+    HDFS-8607. TestFileCorruption doesn't work as expected. (Walter Su via
+    Arpit Agarwal)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e447ae3f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCorruption.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCorruption.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCorruption.java
index 8001bfb..8e0ffe7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCorruption.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCorruption.java
@@ -27,8 +27,12 @@ import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
 
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.filefilter.DirectoryFileFilter;
+import org.apache.commons.io.filefilter.PrefixFileFilter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.fs.FileSystem;
@@ -74,14 +78,13 @@ public class TestFileCorruption {
       String bpid = cluster.getNamesystem().getBlockPoolId();
       File data_dir = MiniDFSCluster.getFinalizedDir(storageDir, bpid);
       assertTrue("data directory does not exist", data_dir.exists());
-      File[] blocks = data_dir.listFiles();
-      assertTrue("Blocks do not exist in data-dir", (blocks != null) && (blocks.length > 0));
-      for (int idx = 0; idx < blocks.length; idx++) {
-        if (!blocks[idx].getName().startsWith(Block.BLOCK_FILE_PREFIX)) {
-          continue;
-        }
-        System.out.println("Deliberately removing file "+blocks[idx].getName());
-        assertTrue("Cannot remove file.", blocks[idx].delete());
+      Collection<File> blocks = FileUtils.listFiles(data_dir,
+          new PrefixFileFilter(Block.BLOCK_FILE_PREFIX),
+          DirectoryFileFilter.DIRECTORY);
+      assertTrue("Blocks do not exist in data-dir", blocks.size() > 0);
+      for (File block : blocks) {
+        System.out.println("Deliberately removing file " + block.getName());
+        assertTrue("Cannot remove file.", block.delete());
       }
       assertTrue("Corrupted replicas not handled properly.",
                  util.checkFiles(fs, "/srcdat"));


Mime
View raw message