hadoop-common-commits mailing list archives

From cutt...@apache.org
Subject svn commit: r423054 - in /lucene/hadoop/trunk/src/test/org/apache/hadoop/fs: DistributedFSCheck.java TestDFSIO.java
Date Tue, 18 Jul 2006 11:38:02 GMT
Author: cutting
Date: Tue Jul 18 04:38:01 2006
New Revision: 423054

URL: http://svn.apache.org/viewvc?rev=423054&view=rev
Log:
HADOOP-368.  Improvements to DistributedFSCheck and TestDFSIO.  Contributed by Konstantin.

Modified:
    lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDFSIO.java

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java?rev=423054&r1=423053&r2=423054&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java Tue Jul 18 04:38:01 2006
@@ -81,10 +81,11 @@
   public void testFSBlocks( String rootName ) throws Exception {
     createInputFile(rootName);
     runDistributedFSCheck();
+    cleanup();  // clean up after all to restore the system state
   }
 
   private void createInputFile( String rootName ) throws IOException {
-    fs.delete(MAP_INPUT_DIR);
+    cleanup();  // clean up if previous run failed
 
     Path inputFile = new Path(MAP_INPUT_DIR, "in_file");
     SequenceFile.Writer writer =
@@ -133,18 +134,22 @@
                         long offset 
                         ) throws IOException {
       // open file
-      DataInputStream in;
-      in = new DataInputStream(fs.open(new Path(name)));
+      FSDataInputStream in = null;
+      try {
+        in = fs.open(new Path(name));
+      } catch( IOException e ) {
+        return name + "@(missing)";
+      }
+      in.seek( offset );
       long actualSize = 0;
       try {
         long blockSize = fs.getDefaultBlockSize();
-        int curSize = bufferSize;
-        for(  actualSize = 0; 
+        reporter.setStatus( "reading " + name + "@" + 
+            offset + "/" + blockSize );
+        for(  int curSize = bufferSize; 
               curSize == bufferSize && actualSize < blockSize;
               actualSize += curSize) {
-          curSize = in.read( buffer, (int)offset, Math.min(bufferSize, (int)(blockSize - actualSize)) );
-          reporter.setStatus( "reading " + name + "@" + 
-                              offset + "/" + blockSize );
+          curSize = in.read( buffer, 0, bufferSize );
         }
       } catch( IOException e ) {
         LOG.info( "Corrupted block detected in \"" + name + "\" at " + offset );
@@ -178,7 +183,6 @@
   }
   
   private void runDistributedFSCheck() throws Exception {
-    fs.delete(READ_DIR);
     JobConf job = new JobConf( fs.getConf(), DistributedFSCheck.class );
 
     job.setInputPath(MAP_INPUT_DIR);
@@ -240,6 +244,7 @@
     long execTime = System.currentTimeMillis() - tStart;
     
     test.analyzeResult( execTime, resFileName, viewStats );
+    // test.cleanup();  // clean up after all to restore the system state
   }
   
   private void analyzeResult( long execTime,
@@ -318,7 +323,7 @@
     }
   }
 
-  private void cleanup() throws Exception {
+  private void cleanup() throws IOException {
     LOG.info( "Cleaning up test files" );
     fs.delete(TEST_ROOT_DIR);
   }
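
For reference, the rewritten read loop above changes more than error handling: the removed call in.read( buffer, (int)offset, ... ) passed the file offset as the index into the buffer, which overruns the array whenever the block offset exceeds bufferSize. The new code seeks to the offset first and always reads into the start of the buffer. A minimal sketch of that pattern outside the MapReduce harness (class and method names here are illustrative, not part of the commit):

    import java.io.IOException;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Illustrative sketch only: open, seek to the block offset, then read
    // whole buffers until a short read or the default block size is covered.
    // An IOException during the reads is what flags the block as corrupted.
    public class BlockReadSketch {
      public static long readBlock(FileSystem fs, String name, long offset,
                                   int bufferSize) throws IOException {
        byte[] buffer = new byte[bufferSize];
        FSDataInputStream in;
        try {
          in = fs.open(new Path(name));
        } catch (IOException e) {
          return -1;                  // stands in for the "name@(missing)" report
        }
        try {
          in.seek(offset);            // position at the block under test
          long blockSize = fs.getDefaultBlockSize();
          long actualSize = 0;        // bytes read from this block so far
          int curSize = bufferSize;
          while (curSize == bufferSize && actualSize < blockSize) {
            curSize = in.read(buffer, 0, bufferSize);
            if (curSize > 0)
              actualSize += curSize;  // EOF (-1) simply ends the loop
          }
          return actualSize;
        } finally {
          in.close();
        }
      }
    }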

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDFSIO.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDFSIO.java?rev=423054&r1=423053&r2=423054&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDFSIO.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDFSIO.java Tue Jul 18 04:38:01 2006
@@ -99,6 +99,7 @@
     createControlFile(fs, fileSize, nrFiles);
     writeTest(fs);
     readTest(fs);
+    cleanup(fs);
   }
 
   private static void createControlFile(
@@ -249,8 +250,7 @@
                         ) throws IOException {
       totalSize *= MEGA;
       // open file
-      DataInputStream in;
-      in = new DataInputStream(fs.open(new Path(DATA_DIR, name)));
+      DataInputStream in = fs.open(new Path(DATA_DIR, name));
       try {
         long actualSize = 0;
         for( int curSize = bufferSize; curSize == bufferSize; ) {
@@ -425,7 +425,7 @@
     }
   }
 
-  private static void cleanup( FileSystem fs ) throws Exception {
+  private static void cleanup( FileSystem fs ) throws IOException {
     LOG.info( "Cleaning up test files" );
     fs.delete(new Path(TEST_ROOT_DIR));
   }
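
Both files now follow the same idempotence pattern: cleanup() runs at the start of a sequence (so leftovers from a previously failed run cannot skew the next one) and again once all tests finish, and its signature narrows from throws Exception to throws IOException. A sketch of that ordering with hypothetical names (TEST_ROOT stands in for the tests' TEST_ROOT_DIR; the try/finally is a tightening not present in the commit, where the trailing cleanup only runs when the tests succeed):

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Illustrative sketch of the before/after cleanup ordering.
    public class CleanupPatternSketch {
      private static final Path TEST_ROOT = new Path("/benchmarks/sketch");

      public static void runBenchmark(FileSystem fs) throws IOException {
        cleanup(fs);       // remove leftovers if a previous run failed midway
        try {
          // ... create control/input files, run the jobs, analyze results ...
        } finally {
          cleanup(fs);     // restore the file system state afterwards
        }
      }

      private static void cleanup(FileSystem fs) throws IOException {
        fs.delete(TEST_ROOT);   // the single-argument delete these tests use
      }
    }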


