Subject: svn commit: r780114 - in /hadoop/core/trunk: ./ src/core/org/apache/hadoop/fs/ src/core/org/apache/hadoop/io/ src/core/org/apache/hadoop/metrics/ src/hdfs/org/apache/hadoop/hdfs/server/datanode/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/test/
Date: Fri, 29 May 2009 21:27:53 -0000
To: core-commits@hadoop.apache.org
From: hairong@apache.org
Reply-To: core-dev@hadoop.apache.org
X-Mailer: svnmailer-1.0.8
Message-Id: <20090529212754.C053A238888D@eris.apache.org>

Author: hairong
Date: Fri May 29 21:27:51 2009
New Revision: 780114

URL: http://svn.apache.org/viewvc?rev=780114&view=rev
Log:
HADOOP-5864. Fix DMI and OBL findbugs in packages hdfs and metrics. Contributed by Hairong Kuang.

Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java
    hadoop/core/trunk/src/core/org/apache/hadoop/io/IOUtils.java
    hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeBlockInfo.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
    hadoop/core/trunk/src/test/findbugsExcludeFile.xml

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Fri May 29 21:27:51 2009
@@ -726,6 +726,9 @@
     HADOOP-5940. trunk eclipse-plugin build fails while trying to copy 
    commons-cli jar from the lib dir (Giridharan Kesavan via gkesavan)
 
+    HADOOP-5864. Fix DMI and OBL findbugs in packages hdfs and metrics.
+    (hairong)
+
 Release 0.20.1 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/FsShell.java Fri May 29 21:27:51 2009
@@ -1170,10 +1170,13 @@
     while (true) {
       FSDataInputStream in = srcFs.open(path);
-      in.seek(offset);
-      IOUtils.copyBytes(in, System.out, 1024, false);
-      offset = in.getPos();
-      in.close();
+      try {
+        in.seek(offset);
+        IOUtils.copyBytes(in, System.out, 1024);
+        offset = in.getPos();
+      } finally {
+        in.close();
+      }
       if (!foption) {
         break;
       }

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/io/IOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/io/IOUtils.java?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/io/IOUtils.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/io/IOUtils.java Fri May 29 21:27:51 2009
@@ -41,17 +41,8 @@
   public static void copyBytes(InputStream in, OutputStream out, int buffSize, boolean close) 
     throws IOException {
 
-    PrintStream ps = out instanceof PrintStream ? (PrintStream)out : null;
-    byte buf[] = new byte[buffSize];
     try {
-      int bytesRead = in.read(buf);
-      while (bytesRead >= 0) {
-        out.write(buf, 0, bytesRead);
-        if ((ps != null) && ps.checkError()) {
-          throw new IOException("Unable to write to output stream.");
-        }
-        bytesRead = in.read(buf);
-      }
+      copyBytes(in, out, buffSize);
     } finally {
       if(close) {
         out.close();
@@ -61,6 +52,27 @@
   }
 
   /**
+   * Copies from one stream to another.
+   * 
+   * @param in InputStrem to read from
+   * @param out OutputStream to write to
+   * @param buffSize the size of the buffer
+   */
+  public static void copyBytes(InputStream in, OutputStream out, int buffSize) 
+    throws IOException {
+
+    PrintStream ps = out instanceof PrintStream ? (PrintStream)out : null;
+    byte buf[] = new byte[buffSize];
+    int bytesRead = in.read(buf);
+    while (bytesRead >= 0) {
+      out.write(buf, 0, bytesRead);
+      if ((ps != null) && ps.checkError()) {
+        throw new IOException("Unable to write to output stream.");
+      }
+      bytesRead = in.read(buf);
+    }
+  }
+
+  /**
    * Copies from one stream to another. closes the input and output streams 
    * at the end.
   * @param in InputStrem to read from

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ContextFactory.java Fri May 29 21:27:51 2009
@@ -188,16 +188,19 @@
   private void setAttributes() throws IOException {
     InputStream is = getClass().getResourceAsStream(PROPERTIES_FILE);
     if (is != null) {
-      Properties properties = new Properties();
-      properties.load(is);
-      //for (Object propertyNameObj : properties.keySet()) {
-      Iterator it = properties.keySet().iterator();
-      while (it.hasNext()) {
-        String propertyName = (String) it.next();
-        String propertyValue = properties.getProperty(propertyName);
-        setAttribute(propertyName, propertyValue);
+      try {
+        Properties properties = new Properties();
+        properties.load(is);
+        //for (Object propertyNameObj : properties.keySet()) {
+        Iterator it = properties.keySet().iterator();
+        while (it.hasNext()) {
+          String propertyName = (String) it.next();
+          String propertyValue = properties.getProperty(propertyName);
+          setAttribute(propertyName, propertyValue);
+        }
+      } finally {
+        is.close();
       }
-      is.close();
     }
   }

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataStorage.java?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataStorage.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataStorage.java Fri May 29 21:27:51 2009
@@ -363,8 +363,17 @@
   static void linkBlocks(File from, File to, int oldLV) throws IOException {
     if (!from.isDirectory()) {
       if (from.getName().startsWith(COPY_FILE_PREFIX)) {
-        IOUtils.copyBytes(new FileInputStream(from), 
-                          new FileOutputStream(to), 16*1024, true);
+        FileInputStream in = new FileInputStream(from);
+        try {
+          FileOutputStream out = new FileOutputStream(to);
+          try {
+            IOUtils.copyBytes(in, out, 16*1024);
+          } finally {
+            out.close();
+          }
+        } finally {
+          in.close();
+        }
       } else {
         //check if we are upgrading from pre-generation stamp version.
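
For readers unfamiliar with the FindBugs OBL (unsatisfied stream obligation) warning, the sketch below shows the shape of the fix applied in FsShell and DataStorage above: the caller opens the streams itself, calls the new three-argument IOUtils.copyBytes(in, out, buffSize) overload (which copies but closes nothing), and closes each stream in its own finally block so it is released even when the copy throws. This is a minimal illustrative sketch, not code from the patch; the class and method names here are made up.

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.IOUtils;

    public class CopyWithExplicitClose {
      // Hypothetical helper mirroring the pattern used in DataStorage.linkBlocks above.
      static void copyFile(File from, File to) throws IOException {
        FileInputStream in = new FileInputStream(from);
        try {
          FileOutputStream out = new FileOutputStream(to);
          try {
            // The three-argument overload only copies; both streams stay open.
            IOUtils.copyBytes(in, out, 16 * 1024);
          } finally {
            out.close(); // closed even if the copy throws
          }
        } finally {
          in.close(); // closed even if opening the output stream throws
        }
      }
    }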
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeBlockInfo.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeBlockInfo.java?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeBlockInfo.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeBlockInfo.java Fri May 29 21:27:51 2009
@@ -86,9 +86,17 @@
   private void detachFile(File file, Block b) throws IOException {
     File tmpFile = volume.createDetachFile(b, file.getName());
     try {
-      IOUtils.copyBytes(new FileInputStream(file),
-                        new FileOutputStream(tmpFile),
-                        16*1024, true);
+      FileInputStream in = new FileInputStream(file);
+      try {
+        FileOutputStream out = new FileOutputStream(tmpFile);
+        try {
+          IOUtils.copyBytes(in, out, 16*1024);
+        } finally {
+          out.close();
+        }
+      } finally {
+        in.close();
+      }
       if (file.length() != tmpFile.length()) {
         throw new IOException("Copy of file " + file + " size " + file.length()+
                               " into file " + tmpFile +

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java Fri May 29 21:27:51 2009
@@ -160,7 +160,8 @@
    */
   int getExistingPathINodes(byte[][] components, INode[] existing) {
     assert compareBytes(this.name, components[0]) == 0 :
-      "Incorrect name " + getLocalName() + " expected " + components[0];
+      "Incorrect name " + getLocalName() + " expected " + 
+      bytes2String(components[0]);
     INode curNode = this;
     int count = 0;

Modified: hadoop/core/trunk/src/test/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/findbugsExcludeFile.xml?rev=780114&r1=780113&r2=780114&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/findbugsExcludeFile.xml (original)
+++ hadoop/core/trunk/src/test/findbugsExcludeFile.xml Fri May 29 21:27:51 2009
@@ -220,9 +220,17 @@
    [The XML exclusion entries added and removed in this hunk were not preserved in the plain-text archive.]
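
The INodeDirectory change above addresses the DMI side of the report: concatenating a byte[] into a String invokes the array's default toString(), so the assertion message would print something like "[B@1b6d3586" instead of the expected path component. The standalone sketch below shows the before/after behaviour; the bytes2String helper here is only a stand-in for the INode method of the same name (assuming UTF-8 path components), and the class name is made up for illustration.

    import java.nio.charset.StandardCharsets;

    public class ByteArrayMessageDemo {
      // Stand-in for INode.bytes2String(byte[]); assumes UTF-8 encoded path components.
      static String bytes2String(byte[] bytes) {
        return new String(bytes, StandardCharsets.UTF_8);
      }

      public static void main(String[] args) {
        byte[] component = "user".getBytes(StandardCharsets.UTF_8);

        // FindBugs DMI: the array's Object.toString() leaks into the message,
        // printing e.g. "... expected [B@1b6d3586".
        System.out.println("Incorrect name foo expected " + component);

        // Fixed: convert the bytes to a readable string first,
        // printing "... expected user".
        System.out.println("Incorrect name foo expected " + bytes2String(component));
      }
    }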