hadoop-common-commits mailing list archives

From hair...@apache.org
Subject svn commit: r780114 - in /hadoop/core/trunk: ./ src/core/org/apache/hadoop/fs/ src/core/org/apache/hadoop/io/ src/core/org/apache/hadoop/metrics/ src/hdfs/org/apache/hadoop/hdfs/server/datanode/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/test/
Date Fri, 29 May 2009 21:27:53 GMT
Author: hairong
Date: Fri May 29 21:27:51 2009
New Revision: 780114

URL: http://svn.apache.org/viewvc?rev=780114&view=rev
Log:
HADOOP-5864. Fix DMI and OBL findbugs in packages hdfs and metrics. Contributed by Hairong Kuang.
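
For background: FindBugs' OBL (OBL_UNSATISFIED_OBLIGATION) warnings flag code that opens a stream or other resource but may fail to close it on some path, while DMI (dubious method invocation) warnings flag calls that are almost certainly unintended, such as implicitly invoking toString() on an array. A minimal sketch of the OBL shape fixed throughout this commit (names are hypothetical):

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    class OblExample {
      // OBL violation: if read() throws, close() is never reached.
      static int firstByteLeaky(String path) throws IOException {
        InputStream in = new FileInputStream(path);
        int b = in.read();
        in.close();
        return b;
      }

      // Fixed: the finally block closes the stream on every path.
      static int firstByte(String path) throws IOException {
        InputStream in = new FileInputStream(path);
        try {
          return in.read();
        } finally {
          in.close();
        }
      }
    }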

Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java
    hadoop/core/trunk/src/core/org/apache/hadoop/io/IOUtils.java
    hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeBlockInfo.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
    hadoop/core/trunk/src/test/findbugsExcludeFile.xml

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Fri May 29 21:27:51 2009
@@ -726,6 +726,9 @@
     HADOOP-5940. trunk eclipse-plugin build fails while trying to copy 
     commons-cli jar from the lib dir (Giridharan Kesavan via gkesavan)
 
+    HADOOP-5864. Fix DMI and OBL findbugs in packages hdfs and metrics.
+    (hairong)
+
 Release 0.20.1 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java Fri May 29 21:27:51 2009
@@ -1170,10 +1170,13 @@
 
     while (true) {
       FSDataInputStream in = srcFs.open(path);
-      in.seek(offset);
-      IOUtils.copyBytes(in, System.out, 1024, false);
-      offset = in.getPos();
-      in.close();
+      try {
+        in.seek(offset);
+        IOUtils.copyBytes(in, System.out, 1024);
+        offset = in.getPos();
+      } finally {
+        in.close();
+      }
       if (!foption) {
         break;
       }
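
The change above wraps each iteration's read in try/finally so the FSDataInputStream is closed even when seek() or the copy throws, and it switches to the new three-argument copyBytes so System.out stays open. A standalone sketch of the same tail-follow pattern (the poll interval and method shape are illustrative, not FsShell's exact code):

    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IOUtils;

    class TailSketch {
      // Dump a file from 'offset' to the end; with follow=true, keep polling.
      static void tail(FileSystem srcFs, Path path, boolean follow)
          throws Exception {
        long offset = 0;
        while (true) {
          FSDataInputStream in = srcFs.open(path);
          try {
            in.seek(offset);
            IOUtils.copyBytes(in, System.out, 1024);  // leaves System.out open
            offset = in.getPos();
          } finally {
            in.close();  // closed on every path, as in the patch above
          }
          if (!follow) break;
          Thread.sleep(5000);  // poll interval is illustrative
        }
      }
    }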

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/io/IOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/io/IOUtils.java?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/io/IOUtils.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/io/IOUtils.java Fri May 29 21:27:51 2009
@@ -41,17 +41,8 @@
   public static void copyBytes(InputStream in, OutputStream out, int buffSize, boolean close)
     throws IOException {
 
-    PrintStream ps = out instanceof PrintStream ? (PrintStream)out : null;
-    byte buf[] = new byte[buffSize];
     try {
-      int bytesRead = in.read(buf);
-      while (bytesRead >= 0) {
-        out.write(buf, 0, bytesRead);
-        if ((ps != null) && ps.checkError()) {
-          throw new IOException("Unable to write to output stream.");
-        }
-        bytesRead = in.read(buf);
-      }
+      copyBytes(in, out, buffSize);
     } finally {
       if(close) {
         out.close();
@@ -61,6 +52,27 @@
   }
   
   /**
+   * Copies from one stream to another.
+   * 
+   * @param in InputStream to read from
+   * @param out OutputStream to write to
+   * @param buffSize the size of the buffer 
+   */
+  public static void copyBytes(InputStream in, OutputStream out, int buffSize) 
+    throws IOException {
+
+    PrintStream ps = out instanceof PrintStream ? (PrintStream)out : null;
+    byte buf[] = new byte[buffSize];
+    int bytesRead = in.read(buf);
+    while (bytesRead >= 0) {
+      out.write(buf, 0, bytesRead);
+      if ((ps != null) && ps.checkError()) {
+        throw new IOException("Unable to write to output stream.");
+      }
+      bytesRead = in.read(buf);
+    }
+  }
+  /**
   * Copies from one stream to another. <strong>closes the input and output streams
    * at the end</strong>.
   * @param in InputStream to read from
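
The new three-argument overload never closes either stream, so a caller that must keep one open (FsShell writing to System.out above) owns the lifecycle itself; the existing four-argument overload with close=true still closes both streams in its finally block. A brief usage sketch:

    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import org.apache.hadoop.io.IOUtils;

    class CopyBytesUsage {
      public static void main(String[] args) throws Exception {
        // 3-arg overload: neither stream is closed; the caller closes
        // what it opened and leaves System.out alone.
        FileInputStream in = new FileInputStream(args[0]);
        try {
          IOUtils.copyBytes(in, System.out, 4096);
        } finally {
          in.close();
        }

        // 4-arg overload with close=true: both streams are closed for
        // the caller once the copy finishes or fails.
        IOUtils.copyBytes(new FileInputStream(args[0]),
                          new FileOutputStream(args[1]), 4096, true);
      }
    }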

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java Fri May 29 21:27:51 2009
@@ -188,16 +188,19 @@
   private void setAttributes() throws IOException {
     InputStream is = getClass().getResourceAsStream(PROPERTIES_FILE);
     if (is != null) {
-      Properties properties = new Properties();
-      properties.load(is);
-      //for (Object propertyNameObj : properties.keySet()) {
-      Iterator it = properties.keySet().iterator();
-      while (it.hasNext()) {
-        String propertyName = (String) it.next();
-        String propertyValue = properties.getProperty(propertyName);
-        setAttribute(propertyName, propertyValue);
+      try {
+        Properties properties = new Properties();
+        properties.load(is);
+        //for (Object propertyNameObj : properties.keySet()) {
+        Iterator it = properties.keySet().iterator();
+        while (it.hasNext()) {
+          String propertyName = (String) it.next();
+          String propertyValue = properties.getProperty(propertyName);
+          setAttribute(propertyName, propertyValue);
+        }
+      } finally {
+        is.close();
       }
-      is.close();
     }
   }
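
Besides moving is.close() into a finally block so it runs even when Properties.load() throws, the commented-out for-each line hints that on Java 5+ the raw Iterator and cast can be avoided. A sketch of the same method in that style (the setAttribute stub is only for illustration):

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    class ContextFactorySketch {
      void setAttribute(String name, Object value) { /* stub */ }

      // Same logic as the patched setAttributes(), with a typed loop.
      void setAttributes(InputStream is) throws IOException {
        if (is == null) return;
        try {
          Properties properties = new Properties();
          properties.load(is);
          for (Object key : properties.keySet()) {
            String name = (String) key;
            setAttribute(name, properties.getProperty(name));
          }
        } finally {
          is.close();  // runs even if load() or setAttribute() throws
        }
      }
    }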
     

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataStorage.java?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataStorage.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataStorage.java Fri May 29 21:27:51 2009
@@ -363,8 +363,17 @@
   static void linkBlocks(File from, File to, int oldLV) throws IOException {
     if (!from.isDirectory()) {
       if (from.getName().startsWith(COPY_FILE_PREFIX)) {
-        IOUtils.copyBytes(new FileInputStream(from), 
-                          new FileOutputStream(to), 16*1024, true);
+        FileInputStream in = new FileInputStream(from);
+        try {
+          FileOutputStream out = new FileOutputStream(to);
+          try {
+            IOUtils.copyBytes(in, out, 16*1024);
+          } finally {
+            out.close();
+          }
+        } finally {
+          in.close();
+        }
       } else {
         
         //check if we are upgrading from pre-generation stamp version.
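
The nesting order matters here: if the FileOutputStream constructor throws (for instance, the target path is unwritable), the outer finally still closes the already-open input stream, a guarantee the old one-line call could not make once stream construction itself failed. Condensed into a sketch (copyFile is a hypothetical helper, not part of the patch):

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.io.IOUtils;

    class CopyPattern {
      static void copyFile(File from, File to) throws IOException {
        FileInputStream in = new FileInputStream(from);
        try {                                                // protects 'in'
          FileOutputStream out = new FileOutputStream(to);   // may throw
          try {                                              // protects 'out'
            IOUtils.copyBytes(in, out, 16 * 1024);
          } finally {
            out.close();
          }
        } finally {
          in.close();  // reached even if opening 'out' failed
        }
      }
    }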

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeBlockInfo.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeBlockInfo.java?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeBlockInfo.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeBlockInfo.java Fri May 29 21:27:51 2009
@@ -86,9 +86,17 @@
   private void detachFile(File file, Block b) throws IOException {
     File tmpFile = volume.createDetachFile(b, file.getName());
     try {
-      IOUtils.copyBytes(new FileInputStream(file),
-                        new FileOutputStream(tmpFile),
-                        16*1024, true);
+      FileInputStream in = new FileInputStream(file);
+      try {
+        FileOutputStream out = new FileOutputStream(tmpFile);
+        try {
+          IOUtils.copyBytes(in, out, 16*1024);
+        } finally {
+          out.close();
+        }
+      } finally {
+        in.close();
+      }
       if (file.length() != tmpFile.length()) {
         throw new IOException("Copy of file " + file + " size " + file.length()+
                               " into file " + tmpFile +

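This is the same nested try/finally shape as in DataStorage.linkBlocks above; both call sites could share a helper like the copyFile sketch shown there, with detachFile keeping its length check after the copy.
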
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java Fri May 29 21:27:51 2009
@@ -160,7 +160,8 @@
    */
   int getExistingPathINodes(byte[][] components, INode[] existing) {
     assert compareBytes(this.name, components[0]) == 0 :
-      "Incorrect name " + getLocalName() + " expected " + components[0];
+      "Incorrect name " + getLocalName() + " expected " + 
+      bytes2String(components[0]);
 
     INode curNode = this;
     int count = 0;
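
This hunk is the DMI fix: concatenating a byte[] into a String invokes the array's inherited Object.toString(), printing an identity like [B@1d44bcfa instead of the path component. A minimal illustration, where bytes2String stands in for the UTF-8 decoding helper the class uses:

    import java.io.UnsupportedEncodingException;

    class Bytes2StringDemo {
      // Stand-in for the helper referenced above; decodes UTF-8 bytes.
      static String bytes2String(byte[] bytes) {
        try {
          return new String(bytes, "UTF-8");
        } catch (UnsupportedEncodingException e) {
          throw new AssertionError(e);  // UTF-8 is always supported
        }
      }

      public static void main(String[] args) throws Exception {
        byte[] component = "foo".getBytes("UTF-8");
        System.out.println("expected " + component);
        // prints something like: expected [B@1d44bcfa
        System.out.println("expected " + bytes2String(component));
        // prints: expected foo
      }
    }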

Modified: hadoop/core/trunk/src/test/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/findbugsExcludeFile.xml?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/findbugsExcludeFile.xml (original)
+++ hadoop/core/trunk/src/test/findbugsExcludeFile.xml Fri May 29 21:27:51 2009
@@ -220,9 +220,17 @@
        <Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
      </Match>
 
+     <!--
+       CreateBlockWriteStreams and getTmpInputStreams are pretty much like a stream constructor.
+       The newly created streams are not supposed to be closed in the constructor. So ignore
+       the OBL warning.
+     -->
      <Match>
-       <Class name="org.apache.hadoop.examples.ContextFactory" />
-       <Method name="setAttributes" />
+       <Class name="org.apache.hadoop.hdfs.server.datanode.FSDataset" />
+       <Or>
+         <Method name="createBlockWriteStreams" />
+         <Method name="getTmpInputStreams" />
+       </Or>
        <Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
      </Match>
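
Factory-style methods that return open streams inherently trip OBL, because the close obligation transfers to the caller along with the return value; that is what the updated exclusion records for FSDataset (replacing the ContextFactory entry, which the try/finally fix above made unnecessary). A generic illustration with hypothetical names:

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    class StreamFactory {
      // Deliberately does NOT close the stream: ownership passes to the
      // caller, the situation the OBL exclusion above documents.
      static InputStream openBlockStream(File f) throws IOException {
        return new FileInputStream(f);
      }

      static void caller(File f) throws IOException {
        InputStream in = openBlockStream(f);
        try {
          in.read();
        } finally {
          in.close();  // the caller discharges the obligation
        }
      }
    }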
 


