hadoop-common-commits mailing list archives

From cutt...@apache.org
Subject svn commit: r496844 - in /lucene/hadoop/trunk: CHANGES.txt src/java/org/apache/hadoop/dfs/DFSClient.java
Date Tue, 16 Jan 2007 20:02:11 GMT
Author: cutting
Date: Tue Jan 16 12:02:10 2007
New Revision: 496844

URL: http://svn.apache.org/viewvc?view=rev&rev=496844
Log:
HADOOP-757.  Fix 'Bad File Descriptor' exception in HDFS client when an output file is closed
twice.  Contributed by Raghu.
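
For context, here is a minimal sketch (not the committed Hadoop code; the class and
method names below are hypothetical) of the idiom this patch applies: clear the field
before closing or deleting, so that a second close() finds nothing left to touch
instead of acting on a file descriptor or file the object no longer owns.

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;

    class BufferedLocalCopy {
        private File backupFile;           // local spill file
        private OutputStream backupStream; // stream onto the spill file

        BufferedLocalCopy(File dir) throws IOException {
            backupFile = File.createTempFile("backup", ".tmp", dir);
            backupStream = new FileOutputStream(backupFile);
        }

        void write(byte[] data) throws IOException {
            if (backupStream == null) {
                throw new IOException("backup stream is already closed or was never opened");
            }
            backupStream.write(data);
        }

        /** Safe to call more than once; later calls see null fields and do nothing. */
        void close() throws IOException {
            if (backupStream != null) {
                OutputStream stream = backupStream;
                backupStream = null;   // clear first, so a second close() is a no-op
                stream.close();
            }
            if (backupFile != null) {
                File file = backupFile;
                backupFile = null;     // clear first, so we never delete a file we no longer own
                file.delete();
            }
        }
    }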

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=496844&r1=496843&r2=496844
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Tue Jan 16 12:02:10 2007
@@ -14,6 +14,9 @@
  3. HADOOP-852.  Add an ant task to compile record definitions, and
     use it to compile record unit tests.  (Milind Bhandarkar via cutting)
 
+ 4. HADOOP-757.  Fix "Bad File Descriptor" exception in HDFS client
+    when an output file is closed twice.  (Raghu Angadi via cutting)
+
 
 Release 0.10.1 - 2007-01-10
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java?view=diff&rev=496844&r1=496843&r2=496844
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java Tue Jan 16 12:02:10 2007
@@ -936,6 +936,25 @@
             }
         }
 
+        /* Wrapper for closing backupStream. This sets backupStream to null so
+         * that we do not attempt to write to a backupStream that could be
+         * invalid in subsequent writes. Otherwise we might end up trying to
+         * write to a file descriptor that we don't own.
+         */
+        private void closeBackupStream() throws IOException {
+          OutputStream stream = backupStream;
+          backupStream = null;
+          stream.close();
+        }
+        /* Similar to closeBackupStream(). Theoretically, deleting a file
+         * twice could result in deleting a file that we should not.
+         */
+        private void deleteBackupFile() {
+          File file = backupFile;
+          backupFile = null;
+          file.delete();
+        }
+        
         private File newBackupFile() throws IOException {
           File result = conf.getFile("dfs.client.buffer.dir",
                                      "tmp"+File.separator+
@@ -1147,6 +1166,10 @@
             int workingPos = Math.min(pos, maxPos);
             
             if (workingPos > 0) {
+                if ( backupStream == null ) {
+                    throw new IOException( "Trying to write to backupStream " +
+                                           "but it is already closed or was never opened");
+                }
                 //
                 // To the local block backup, write just the bytes
                 //
@@ -1168,7 +1191,7 @@
             //
             // Done with local copy
             //
-            backupStream.close();
+            closeBackupStream();
 
             //
             // Send it to datanode
@@ -1204,10 +1227,11 @@
             //
             // Delete local backup, start new one
             //
-            backupFile.delete();
-            backupFile = newBackupFile();
-            backupStream = new FileOutputStream(backupFile);
+            deleteBackupFile();
+            File tmpFile = newBackupFile();
             bytesWrittenToBlock = 0;
+            backupStream = new FileOutputStream(tmpFile);
+            backupFile = tmpFile;
         }
 
         /**
@@ -1273,8 +1297,12 @@
               }
             }
             
-            backupStream.close();
-            backupFile.delete();
+            if ( backupStream != null ) {
+              closeBackupStream();
+            }
+            if ( backupFile != null ) {
+              deleteBackupFile();
+            }
 
             if (s != null) {
                 s.close();
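
A hypothetical usage of the sketch given after the log message, showing why the guard
matters: close() is often called once from a finally block and then again by a caller
or a wrapping stream, and with the null-out idiom the second call is a harmless no-op
rather than the source of a "Bad File Descriptor" error.

    import java.io.File;
    import java.io.IOException;

    class DoubleCloseDemo {
        public static void main(String[] args) throws IOException {
            File tmpDir = new File(System.getProperty("java.io.tmpdir"));
            BufferedLocalCopy copy = new BufferedLocalCopy(tmpDir);
            try {
                copy.write("hello".getBytes());
            } finally {
                copy.close();   // first close: closes the stream, deletes the backup file
            }
            copy.close();       // second close: fields are already null, nothing happens
        }
    }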


