Subject: svn commit: r1582068 - in /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/client/ src/main/java/org/apache/hadoop/hdfs/protocol/ src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/ src/main/java/org...
Date: Wed, 26 Mar 2014 21:27:34 -0000
To: hdfs-commits@hadoop.apache.org
From: suresh@apache.org

Author: suresh
Date: Wed Mar 26 21:27:33 2014
New Revision: 1582068

URL: http://svn.apache.org/r1582068
Log:
HDFS-6155. Fix Boxing/unboxing to parse a primitive findbugs warnings. Contributed by Suresh Srinivas.
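For readers unfamiliar with this findbugs category, the short sketch below (written for this mail, not part of the commit; class and variable names are made up) shows the pattern being removed throughout the diff: calling a wrapper factory such as Long.valueOf() and immediately auto-unboxing the result into a primitive, versus calling the parse method that returns the primitive directly.

    // Standalone illustration of the "Boxing/unboxing to parse a primitive" warning.
    public class ParsePrimitiveExample {
      public static void main(String[] args) {
        String value = "1582068";

        // Flagged form: Long.valueOf() allocates a java.lang.Long wrapper,
        // which is then auto-unboxed because the target is a primitive long.
        long boxedThenUnboxed = Long.valueOf(value);

        // Preferred form: Long.parseLong() returns the primitive directly,
        // with no intermediate wrapper object.
        long parsedDirectly = Long.parseLong(value);

        System.out.println(boxedThenUnboxed == parsedDirectly); // prints: true
      }
    }

The same substitution appears below as Integer.parseInt()/Long.parseLong() where a string is parsed into a primitive, and as Integer.toString()/Long.toString() where a primitive is formatted.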
Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/client/ShortCircuitCache.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PipelineAck.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JNStorage.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceScanner.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ReplicaMap.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CheckpointSignature.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageTransactionalStorageInspector.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileJournalManager.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/HostFileManager.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/DelimitedImageVisitor.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionVisitor.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsImageVisitor.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFetchImage.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestShortCircuitLocalRead.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeJspHelper.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/SpotCheckImageVisitor.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestDelegationTokenRemoteFetcher.java
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Wed Mar 26 21:27:33 2014
@@ -261,6 +261,9 @@ Release 2.5.0 - UNRELEASED
     HDFS-5196. Provide more snapshot information in WebUI.
     (Shinichi Yamashita via wheat9)
 
+    HDFS-6155. Fix Boxing/unboxing to parse a primitive findbugs warnings.
+    (suresh)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/client/ShortCircuitCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/client/ShortCircuitCache.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/client/ShortCircuitCache.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/client/ShortCircuitCache.java Wed Mar 26 21:27:33 2014
@@ -618,9 +618,8 @@ public class ShortCircuitCache implement
       evictionTimeNs++;
     }
     Preconditions.checkState(null == replica.getEvictableTimeNs());
-    Long time = Long.valueOf(evictionTimeNs);
-    replica.setEvictableTimeNs(time);
-    map.put(time, replica);
+    replica.setEvictableTimeNs(evictionTimeNs);
+    map.put(evictionTimeNs, replica);
   }
 
   /**

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java Wed Mar 26 21:27:33 2014
@@ -38,7 +38,7 @@ public class CacheDirectiveIterator
   public CacheDirectiveIterator(ClientProtocol namenode,
       CacheDirectiveInfo filter) {
-    super(Long.valueOf(0));
+    super(0L);
     this.namenode = namenode;
     this.filter = filter;
   }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PipelineAck.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PipelineAck.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PipelineAck.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PipelineAck.java Wed Mar 26 21:27:33 2014
@@ -52,7 +52,7 @@ public class PipelineAck {
     String[] ele = conf.get(DFS_DATANODE_OOB_TIMEOUT_KEY,
         DFS_DATANODE_OOB_TIMEOUT_DEFAULT).split(",");
     for (int i = 0; i < NUM_OOB_TYPES; i++) {
-      OOB_TIMEOUT[i] = (i < ele.length) ? Long.valueOf(ele[i]) : 0;
+      OOB_TIMEOUT[i] = (i < ele.length) ? Long.parseLong(ele[i]) : 0;
     }
   }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JNStorage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JNStorage.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JNStorage.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JNStorage.java Wed Mar 26 21:27:33 2014
@@ -165,7 +165,7 @@ class JNStorage extends Storage {
       if (matcher.matches()) {
         // This parsing will always succeed since the group(1) is
         // /\d+/ in the regex itself.
-        long txid = Long.valueOf(matcher.group(1));
+        long txid = Long.parseLong(matcher.group(1));
         if (txid < minTxIdToKeep) {
           LOG.info("Purging no-longer needed file " + txid);
           if (!f.delete()) {

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java Wed Mar 26 21:27:33 2014
@@ -1167,7 +1167,7 @@ public class DatanodeManager {
         port = DFSConfigKeys.DFS_DATANODE_DEFAULT_PORT;
       } else {
         hostStr = hostLine.substring(0, idx);
-        port = Integer.valueOf(hostLine.substring(idx+1));
+        port = Integer.parseInt(hostLine.substring(idx+1));
       }
 
       if (InetAddresses.isInetAddress(hostStr)) {

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceScanner.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceScanner.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceScanner.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceScanner.java Wed Mar 26 21:27:33 2014
@@ -382,11 +382,11 @@ class BlockPoolSliceScanner {
       try {
         if (name.equals("id")) {
-          entry.blockId = Long.valueOf(value);
+          entry.blockId = Long.parseLong(value);
         } else if (name.equals("time")) {
-          entry.verificationTime = Long.valueOf(value);
+          entry.verificationTime = Long.parseLong(value);
         } else if (name.equals("genstamp")) {
-          entry.genStamp = Long.valueOf(value);
+          entry.genStamp = Long.parseLong(value);
         }
       } catch(NumberFormatException nfe) {
         LOG.warn("Cannot parse line: " + line, nfe);
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java Wed Mar 26 21:27:33 2014
@@ -187,7 +187,7 @@ abstract public class ReplicaInfo extend
     if (!internedBaseDirs.containsKey(replicaDirInfo.baseDirPath)) {
       // Create a new String path of this file and make a brand new File object
       // to guarantee we drop the reference to the underlying char[] storage.
-      File baseDir = new File(new String(replicaDirInfo.baseDirPath));
+      File baseDir = new File(replicaDirInfo.baseDirPath);
       internedBaseDirs.put(replicaDirInfo.baseDirPath, baseDir);
     }
     this.baseDir = internedBaseDirs.get(replicaDirInfo.baseDirPath);

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ReplicaMap.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ReplicaMap.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ReplicaMap.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ReplicaMap.java Wed Mar 26 21:27:33 2014
@@ -154,7 +154,7 @@ class ReplicaMap {
   /**
    * Remove the replica's meta information from the map if present
    * @param bpid block pool id
-   * @param the block id of the replica to be removed
+   * @param blockId block id of the replica to be removed
    * @return the removed replica's meta information
    */
   ReplicaInfo remove(String bpid, long blockId) {
@@ -223,4 +223,4 @@ class ReplicaMap {
   Object getMutext() {
     return mutex;
   }
-}
\ No newline at end of file
+}

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CheckpointSignature.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CheckpointSignature.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CheckpointSignature.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CheckpointSignature.java Wed Mar 26 21:27:33 2014
@@ -22,7 +22,6 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType;
-import org.apache.hadoop.hdfs.server.namenode.FSImage;
 
 import com.google.common.collect.ComparisonChain;
@@ -53,13 +52,13 @@ public class CheckpointSignature extends
     assert fields.length == NUM_FIELDS :
       "Must be " + NUM_FIELDS + " fields in CheckpointSignature";
     int i = 0;
-    layoutVersion = Integer.valueOf(fields[i++]);
-    namespaceID = Integer.valueOf(fields[i++]);
-    cTime = Long.valueOf(fields[i++]);
-    mostRecentCheckpointTxId = Long.valueOf(fields[i++]);
-    curSegmentTxId = Long.valueOf(fields[i++]);
+    layoutVersion = Integer.parseInt(fields[i++]);
+    namespaceID = Integer.parseInt(fields[i++]);
+    cTime = Long.parseLong(fields[i++]);
+    mostRecentCheckpointTxId = Long.parseLong(fields[i++]);
+    curSegmentTxId = Long.parseLong(fields[i++]);
     clusterID = fields[i++];
-    blockpoolID = fields[i++];
+    blockpoolID = fields[i];
   }
 
   public CheckpointSignature(StorageInfo info, String blockpoolID,

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java Wed Mar 26 21:27:33 2014
@@ -2943,7 +2943,7 @@ public class FSDirectory implements Clos
     final String inodeId = DFSUtil.bytes2String(pathComponents[3]);
     long id = 0;
     try {
-      id = Long.valueOf(inodeId);
+      id = Long.parseLong(inodeId);
     } catch (NumberFormatException e) {
       throw new FileNotFoundException("Invalid inode path: " + src);
     }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java Wed Mar 26 21:27:33 2014
@@ -286,7 +286,7 @@ public abstract class FSEditLogOp {
         ClientId.toBytes(st.getValue("RPC_CLIENTID"))
         : RpcConstants.DUMMY_CLIENT_ID;
     this.rpcCallId = st.hasChildren("RPC_CALLID") ?
-        Integer.valueOf(st.getValue("RPC_CALLID"))
+        Integer.parseInt(st.getValue("RPC_CALLID"))
         : RpcConstants.INVALID_CALL_ID;
   }
@@ -303,7 +303,7 @@ public abstract class FSEditLogOp {
     XMLUtils.addSaxString(contentHandler, "RPC_CLIENTID",
         ClientId.toString(clientId));
     XMLUtils.addSaxString(contentHandler, "RPC_CALLID",
-        Integer.valueOf(callId).toString());
+        Integer.toString(callId));
   }
 
   private static final class AclEditLogUtil {
@@ -613,18 +613,18 @@ public abstract class FSEditLogOp {
     @Override
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "LENGTH",
-          Integer.valueOf(length).toString());
+          Integer.toString(length));
       XMLUtils.addSaxString(contentHandler, "INODEID",
-          Long.valueOf(inodeId).toString());
+          Long.toString(inodeId));
       XMLUtils.addSaxString(contentHandler, "PATH", path);
       XMLUtils.addSaxString(contentHandler, "REPLICATION",
           Short.valueOf(replication).toString());
       XMLUtils.addSaxString(contentHandler, "MTIME",
-          Long.valueOf(mtime).toString());
+          Long.toString(mtime));
       XMLUtils.addSaxString(contentHandler, "ATIME",
-          Long.valueOf(atime).toString());
+          Long.toString(atime));
       XMLUtils.addSaxString(contentHandler, "BLOCKSIZE",
-          Long.valueOf(blockSize).toString());
+          Long.toString(blockSize));
       XMLUtils.addSaxString(contentHandler, "CLIENT_NAME", clientName);
       XMLUtils.addSaxString(contentHandler, "CLIENT_MACHINE", clientMachine);
       for (Block b : blocks) {
@@ -641,13 +641,13 @@ public abstract class FSEditLogOp {
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
-      this.length = Integer.valueOf(st.getValue("LENGTH"));
-      this.inodeId = Long.valueOf(st.getValue("INODEID"));
+      this.length = Integer.parseInt(st.getValue("LENGTH"));
+      this.inodeId = Long.parseLong(st.getValue("INODEID"));
       this.path = st.getValue("PATH");
       this.replication = Short.valueOf(st.getValue("REPLICATION"));
-      this.mtime = Long.valueOf(st.getValue("MTIME"));
-      this.atime = Long.valueOf(st.getValue("ATIME"));
-      this.blockSize = Long.valueOf(st.getValue("BLOCKSIZE"));
+      this.mtime = Long.parseLong(st.getValue("MTIME"));
+      this.atime = Long.parseLong(st.getValue("ATIME"));
+      this.blockSize = Long.parseLong(st.getValue("BLOCKSIZE"));
      this.clientName = st.getValue("CLIENT_NAME");
      this.clientMachine = st.getValue("CLIENT_MACHINE");
      if (st.hasChildren("BLOCK")) {
@@ -1101,10 +1101,10 @@ public abstract class FSEditLogOp {
     @Override
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "LENGTH",
-          Integer.valueOf(length).toString());
+          Integer.toString(length));
       XMLUtils.addSaxString(contentHandler, "TRG", trg);
       XMLUtils.addSaxString(contentHandler, "TIMESTAMP",
-          Long.valueOf(timestamp).toString());
+          Long.toString(timestamp));
       contentHandler.startElement("", "", "SOURCES", new AttributesImpl());
       for (int i = 0; i < srcs.length; ++i) {
         XMLUtils.addSaxString(contentHandler,
@@ -1115,9 +1115,9 @@
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
-      this.length = Integer.valueOf(st.getValue("LENGTH"));
+      this.length = Integer.parseInt(st.getValue("LENGTH"));
       this.trg = st.getValue("TRG");
-      this.timestamp = Long.valueOf(st.getValue("TIMESTAMP"));
+      this.timestamp = Long.parseLong(st.getValue("TIMESTAMP"));
       List sources = st.getChildren("SOURCES");
       int i = 0;
       while (true) {
@@ -1219,20 +1219,20 @@ public abstract class FSEditLogOp {
     @Override
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "LENGTH",
-          Integer.valueOf(length).toString());
+          Integer.toString(length));
       XMLUtils.addSaxString(contentHandler, "SRC", src);
       XMLUtils.addSaxString(contentHandler, "DST", dst);
       XMLUtils.addSaxString(contentHandler, "TIMESTAMP",
-          Long.valueOf(timestamp).toString());
+          Long.toString(timestamp));
       appendRpcIdsToXml(contentHandler, rpcClientId, rpcCallId);
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
-      this.length = Integer.valueOf(st.getValue("LENGTH"));
+      this.length = Integer.parseInt(st.getValue("LENGTH"));
       this.src = st.getValue("SRC");
       this.dst = st.getValue("DST");
-      this.timestamp = Long.valueOf(st.getValue("TIMESTAMP"));
+      this.timestamp = Long.parseLong(st.getValue("TIMESTAMP"));
       readRpcIdsFromXml(st);
     }
@@ -1312,17 +1312,17 @@ public abstract class FSEditLogOp {
     @Override
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "LENGTH",
-          Integer.valueOf(length).toString());
+          Integer.toString(length));
       XMLUtils.addSaxString(contentHandler, "PATH", path);
       XMLUtils.addSaxString(contentHandler, "TIMESTAMP",
-          Long.valueOf(timestamp).toString());
+          Long.toString(timestamp));
       appendRpcIdsToXml(contentHandler, rpcClientId, rpcCallId);
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
-      this.length = Integer.valueOf(st.getValue("LENGTH"));
+      this.length = Integer.parseInt(st.getValue("LENGTH"));
       this.path = st.getValue("PATH");
-      this.timestamp = Long.valueOf(st.getValue("TIMESTAMP"));
+      this.timestamp = Long.parseLong(st.getValue("TIMESTAMP"));
       readRpcIdsFromXml(st);
     }
@@ -1451,12 +1451,12 @@ public abstract class FSEditLogOp {
     @Override
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "LENGTH",
-          Integer.valueOf(length).toString());
+          Integer.toString(length));
       XMLUtils.addSaxString(contentHandler, "INODEID",
-          Long.valueOf(inodeId).toString());
+          Long.toString(inodeId));
       XMLUtils.addSaxString(contentHandler, "PATH", path);
       XMLUtils.addSaxString(contentHandler, "TIMESTAMP",
-          Long.valueOf(timestamp).toString());
+          Long.toString(timestamp));
       FSEditLogOp.permissionStatusToXml(contentHandler, permissions);
       if (aclEntries != null) {
         appendAclEntriesToXml(contentHandler, aclEntries);
@@ -1464,10 +1464,10 @@
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
-      this.length = Integer.valueOf(st.getValue("LENGTH"));
-      this.inodeId = Long.valueOf(st.getValue("INODEID"));
+      this.length = Integer.parseInt(st.getValue("LENGTH"));
+      this.inodeId = Long.parseLong(st.getValue("INODEID"));
       this.path = st.getValue("PATH");
-      this.timestamp = Long.valueOf(st.getValue("TIMESTAMP"));
+      this.timestamp = Long.parseLong(st.getValue("TIMESTAMP"));
       this.permissions = permissionStatusFromXml(st);
       aclEntries = readAclEntriesFromXml(st);
     }
@@ -1524,11 +1524,11 @@ public abstract class FSEditLogOp {
     @Override
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "GENSTAMP",
-          Long.valueOf(genStampV1).toString());
+          Long.toString(genStampV1));
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
-      this.genStampV1 = Long.valueOf(st.getValue("GENSTAMP"));
+      this.genStampV1 = Long.parseLong(st.getValue("GENSTAMP"));
     }
   }
@@ -1577,11 +1577,11 @@ public abstract class FSEditLogOp {
     @Override
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "GENSTAMPV2",
-          Long.valueOf(genStampV2).toString());
+          Long.toString(genStampV2));
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
-      this.genStampV2 = Long.valueOf(st.getValue("GENSTAMPV2"));
+      this.genStampV2 = Long.parseLong(st.getValue("GENSTAMPV2"));
     }
   }
@@ -1630,11 +1630,11 @@ public abstract class FSEditLogOp {
     @Override
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "BLOCK_ID",
-          Long.valueOf(blockId).toString());
+          Long.toString(blockId));
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
-      this.blockId = Long.valueOf(st.getValue("BLOCK_ID"));
+      this.blockId = Long.parseLong(st.getValue("BLOCK_ID"));
     }
   }
@@ -1830,12 +1830,12 @@ public abstract class FSEditLogOp {
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "SRC", src);
       XMLUtils.addSaxString(contentHandler, "NSQUOTA",
-          Long.valueOf(nsQuota).toString());
+          Long.toString(nsQuota));
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
       this.src = st.getValue("SRC");
-      this.nsQuota = Long.valueOf(st.getValue("NSQUOTA"));
+      this.nsQuota = Long.parseLong(st.getValue("NSQUOTA"));
     }
   }
@@ -1951,15 +1951,15 @@ public abstract class FSEditLogOp {
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "SRC", src);
       XMLUtils.addSaxString(contentHandler, "NSQUOTA",
-          Long.valueOf(nsQuota).toString());
+          Long.toString(nsQuota));
       XMLUtils.addSaxString(contentHandler, "DSQUOTA",
-          Long.valueOf(dsQuota).toString());
+          Long.toString(dsQuota));
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
       this.src = st.getValue("SRC");
-      this.nsQuota = Long.valueOf(st.getValue("NSQUOTA"));
-      this.dsQuota = Long.valueOf(st.getValue("DSQUOTA"));
+      this.nsQuota = Long.parseLong(st.getValue("NSQUOTA"));
+      this.dsQuota = Long.parseLong(st.getValue("DSQUOTA"));
     }
   }
@@ -2045,19 +2045,19 @@ public abstract class FSEditLogOp {
     @Override
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "LENGTH",
-          Integer.valueOf(length).toString());
+          Integer.toString(length));
       XMLUtils.addSaxString(contentHandler, "PATH", path);
       XMLUtils.addSaxString(contentHandler, "MTIME",
-          Long.valueOf(mtime).toString());
+          Long.toString(mtime));
       XMLUtils.addSaxString(contentHandler, "ATIME",
-          Long.valueOf(atime).toString());
+          Long.toString(atime));
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
-      this.length = Integer.valueOf(st.getValue("LENGTH"));
+      this.length = Integer.parseInt(st.getValue("LENGTH"));
       this.path = st.getValue("PATH");
-      this.mtime = Long.valueOf(st.getValue("MTIME"));
-      this.atime = Long.valueOf(st.getValue("ATIME"));
+      this.mtime = Long.parseLong(st.getValue("MTIME"));
+      this.atime = Long.parseLong(st.getValue("ATIME"));
     }
   }
@@ -2184,27 +2184,27 @@ public abstract class FSEditLogOp {
     @Override
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "LENGTH",
-          Integer.valueOf(length).toString());
+          Integer.toString(length));
       XMLUtils.addSaxString(contentHandler, "INODEID",
-          Long.valueOf(inodeId).toString());
+          Long.toString(inodeId));
       XMLUtils.addSaxString(contentHandler, "PATH", path);
       XMLUtils.addSaxString(contentHandler, "VALUE", value);
       XMLUtils.addSaxString(contentHandler, "MTIME",
-          Long.valueOf(mtime).toString());
+          Long.toString(mtime));
       XMLUtils.addSaxString(contentHandler, "ATIME",
-          Long.valueOf(atime).toString());
+          Long.toString(atime));
       FSEditLogOp.permissionStatusToXml(contentHandler, permissionStatus);
       appendRpcIdsToXml(contentHandler, rpcClientId, rpcCallId);
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
-      this.length = Integer.valueOf(st.getValue("LENGTH"));
-      this.inodeId = Long.valueOf(st.getValue("INODEID"));
+      this.length = Integer.parseInt(st.getValue("LENGTH"));
+      this.inodeId = Long.parseLong(st.getValue("INODEID"));
       this.path = st.getValue("PATH");
       this.value = st.getValue("VALUE");
-      this.mtime = Long.valueOf(st.getValue("MTIME"));
-      this.atime = Long.valueOf(st.getValue("ATIME"));
+      this.mtime = Long.parseLong(st.getValue("MTIME"));
+      this.atime = Long.parseLong(st.getValue("ATIME"));
       this.permissionStatus = permissionStatusFromXml(st);
 
       readRpcIdsFromXml(st);
@@ -2328,11 +2328,11 @@ public abstract class FSEditLogOp {
     @Override
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, "LENGTH",
-          Integer.valueOf(length).toString());
+          Integer.toString(length));
       XMLUtils.addSaxString(contentHandler, "SRC", src);
       XMLUtils.addSaxString(contentHandler, "DST", dst);
       XMLUtils.addSaxString(contentHandler, "TIMESTAMP",
-          Long.valueOf(timestamp).toString());
+          Long.toString(timestamp));
       StringBuilder bld = new StringBuilder();
       String prefix = "";
       for (Rename r : options) {
@@ -2344,10 +2344,10 @@
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
-      this.length = Integer.valueOf(st.getValue("LENGTH"));
+      this.length = Integer.parseInt(st.getValue("LENGTH"));
       this.src = st.getValue("SRC");
       this.dst = st.getValue("DST");
-      this.timestamp = Long.valueOf(st.getValue("TIMESTAMP"));
+      this.timestamp = Long.parseLong(st.getValue("TIMESTAMP"));
       String opts = st.getValue("OPTIONS");
       String o[] = opts.split("\\|");
       this.options = new Rename[o.length];
@@ -2509,13 +2509,13 @@ public abstract class FSEditLogOp {
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       FSEditLogOp.delegationTokenToXml(contentHandler, token);
       XMLUtils.addSaxString(contentHandler, "EXPIRY_TIME",
-          Long.valueOf(expiryTime).toString());
+          Long.toString(expiryTime));
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
       this.token = delegationTokenFromXml(st.getChildren(
           "DELEGATION_TOKEN_IDENTIFIER").get(0));
-      this.expiryTime = Long.valueOf(st.getValue("EXPIRY_TIME"));
+      this.expiryTime = Long.parseLong(st.getValue("EXPIRY_TIME"));
     }
   }
@@ -2582,13 +2582,13 @@ public abstract class FSEditLogOp {
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       FSEditLogOp.delegationTokenToXml(contentHandler, token);
       XMLUtils.addSaxString(contentHandler, "EXPIRY_TIME",
-          Long.valueOf(expiryTime).toString());
+          Long.toString(expiryTime));
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
       this.token = delegationTokenFromXml(st.getChildren(
           "DELEGATION_TOKEN_IDENTIFIER").get(0));
-      this.expiryTime = Long.valueOf(st.getValue("EXPIRY_TIME"));
+      this.expiryTime = Long.parseLong(st.getValue("EXPIRY_TIME"));
     }
   }
@@ -3624,12 +3624,12 @@ public abstract class FSEditLogOp {
     @Override
     protected void toXml(ContentHandler contentHandler) throws SAXException {
       XMLUtils.addSaxString(contentHandler, name + "TIME",
-          Long.valueOf(time).toString());
+          Long.toString(time));
     }
     @Override
     void fromXml(Stanza st) throws InvalidXmlException {
-      this.time = Long.valueOf(st.getValue(name + "TIME"));
+      this.time = Long.parseLong(st.getValue(name + "TIME"));
     }
 
     @Override
@@ -3936,7 +3936,7 @@ public abstract class FSEditLogOp {
   abstract void fromXml(Stanza st) throws InvalidXmlException;
 
   public void decodeXml(Stanza st) throws InvalidXmlException {
-    this.txid = Long.valueOf(st.getValue("TXID"));
+    this.txid = Long.parseLong(st.getValue("TXID"));
     fromXml(st);
   }
@@ -3944,19 +3944,19 @@
       throws SAXException {
     contentHandler.startElement("", "", "BLOCK", new AttributesImpl());
     XMLUtils.addSaxString(contentHandler, "BLOCK_ID",
-        Long.valueOf(block.getBlockId()).toString());
+        Long.toString(block.getBlockId()));
     XMLUtils.addSaxString(contentHandler, "NUM_BYTES",
-        Long.valueOf(block.getNumBytes()).toString());
+        Long.toString(block.getNumBytes()));
     XMLUtils.addSaxString(contentHandler, "GENSTAMP",
-        Long.valueOf(block.getGenerationStamp()).toString());
+        Long.toString(block.getGenerationStamp()));
     contentHandler.endElement("", "", "BLOCK");
   }
 
   public static Block blockFromXml(Stanza st) throws InvalidXmlException {
-    long blockId = Long.valueOf(st.getValue("BLOCK_ID"));
-    long numBytes = Long.valueOf(st.getValue("NUM_BYTES"));
-    long generationStamp = Long.valueOf(st.getValue("GENSTAMP"));
+    long blockId = Long.parseLong(st.getValue("BLOCK_ID"));
+    long numBytes = Long.parseLong(st.getValue("NUM_BYTES"));
+    long generationStamp = Long.parseLong(st.getValue("GENSTAMP"));
     return new Block(blockId, numBytes, generationStamp);
   }
@@ -3965,7 +3965,7 @@
     contentHandler.startElement("", "", "DELEGATION_TOKEN_IDENTIFIER", new AttributesImpl());
     XMLUtils.addSaxString(contentHandler, "KIND", token.getKind().toString());
     XMLUtils.addSaxString(contentHandler, "SEQUENCE_NUMBER",
-        Integer.valueOf(token.getSequenceNumber()).toString());
+        Integer.toString(token.getSequenceNumber()));
     XMLUtils.addSaxString(contentHandler, "OWNER",
         token.getOwner().toString());
     XMLUtils.addSaxString(contentHandler, "RENEWER",
@@ -3973,11 +3973,11 @@
     XMLUtils.addSaxString(contentHandler, "REALUSER",
         token.getRealUser().toString());
     XMLUtils.addSaxString(contentHandler, "ISSUE_DATE",
-        Long.valueOf(token.getIssueDate()).toString());
+        Long.toString(token.getIssueDate()));
     XMLUtils.addSaxString(contentHandler, "MAX_DATE",
-        Long.valueOf(token.getMaxDate()).toString());
+        Long.toString(token.getMaxDate()));
     XMLUtils.addSaxString(contentHandler, "MASTER_KEY_ID",
-        Integer.valueOf(token.getMasterKeyId()).toString());
+        Integer.toString(token.getMasterKeyId()));
     contentHandler.endElement("", "", "DELEGATION_TOKEN_IDENTIFIER");
   }
@@ -3989,13 +3989,13 @@
       throw new InvalidXmlException("can't understand " +
          "DelegationTokenIdentifier KIND " + kind);
     }
-    int seqNum = Integer.valueOf(st.getValue("SEQUENCE_NUMBER"));
+    int seqNum = Integer.parseInt(st.getValue("SEQUENCE_NUMBER"));
     String owner = st.getValue("OWNER");
     String renewer = st.getValue("RENEWER");
     String realuser = st.getValue("REALUSER");
-    long issueDate = Long.valueOf(st.getValue("ISSUE_DATE"));
-    long maxDate = Long.valueOf(st.getValue("MAX_DATE"));
-    int masterKeyId = Integer.valueOf(st.getValue("MASTER_KEY_ID"));
+    long issueDate = Long.parseLong(st.getValue("ISSUE_DATE"));
+    long maxDate = Long.parseLong(st.getValue("MAX_DATE"));
+    int masterKeyId = Integer.parseInt(st.getValue("MASTER_KEY_ID"));
     DelegationTokenIdentifier token =
         new DelegationTokenIdentifier(new Text(owner), new Text(renewer),
            new Text(realuser));
@@ -4010,9 +4010,9 @@
       DelegationKey key) throws SAXException {
     contentHandler.startElement("", "", "DELEGATION_KEY", new AttributesImpl());
     XMLUtils.addSaxString(contentHandler, "KEY_ID",
-        Integer.valueOf(key.getKeyId()).toString());
+        Integer.toString(key.getKeyId()));
     XMLUtils.addSaxString(contentHandler, "EXPIRY_DATE",
-        Long.valueOf(key.getExpiryDate()).toString());
+        Long.toString(key.getExpiryDate()));
     if (key.getEncodedKey() != null) {
       XMLUtils.addSaxString(contentHandler, "KEY",
           Hex.encodeHexString(key.getEncodedKey()));
@@ -4022,8 +4022,8 @@
   public static DelegationKey delegationKeyFromXml(Stanza st)
       throws InvalidXmlException {
-    int keyId = Integer.valueOf(st.getValue("KEY_ID"));
-    long expiryDate = Long.valueOf(st.getValue("EXPIRY_DATE"));
+    int keyId = Integer.parseInt(st.getValue("KEY_ID"));
+    long expiryDate = Long.parseLong(st.getValue("EXPIRY_DATE"));
     byte key[] = null;
     try {
       key = Hex.decodeHex(st.getValue("KEY").toCharArray());

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageTransactionalStorageInspector.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageTransactionalStorageInspector.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageTransactionalStorageInspector.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageTransactionalStorageInspector.java Wed Mar 26 21:27:33 2014
@@ -108,7 +108,7 @@ class FSImageTransactionalStorageInspect
     if (imageMatch != null) {
       if (sd.getStorageDirType().isOfType(NameNodeDirType.IMAGE)) {
         try {
-          long txid = Long.valueOf(imageMatch.group(1));
+          long txid = Long.parseLong(imageMatch.group(1));
           foundImages.add(new FSImageFile(sd, f, txid));
         } catch (NumberFormatException nfe) {
           LOG.error("Image file " + f + " has improperly formatted " +

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileJournalManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileJournalManager.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileJournalManager.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileJournalManager.java Wed Mar 26 21:27:33 2014
@@ -247,8 +247,8 @@ public class FileJournalManager implemen
     Matcher editsMatch = EDITS_REGEX.matcher(name);
     if (editsMatch.matches()) {
       try {
-        long startTxId = Long.valueOf(editsMatch.group(1));
-        long endTxId = Long.valueOf(editsMatch.group(2));
+        long startTxId = Long.parseLong(editsMatch.group(1));
+        long endTxId = Long.parseLong(editsMatch.group(2));
         ret.add(new EditLogFile(f, startTxId, endTxId));
       } catch (NumberFormatException nfe) {
         LOG.error("Edits file " + f + " has improperly formatted " +
@@ -261,7 +261,7 @@ public class FileJournalManager implemen
     Matcher inProgressEditsMatch = EDITS_INPROGRESS_REGEX.matcher(name);
     if (inProgressEditsMatch.matches()) {
       try {
-        long startTxId = Long.valueOf(inProgressEditsMatch.group(1));
+        long startTxId = Long.parseLong(inProgressEditsMatch.group(1));
         ret.add(
             new EditLogFile(f, startTxId, HdfsConstants.INVALID_TXID, true));
       } catch (NumberFormatException nfe) {

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/HostFileManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/HostFileManager.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/HostFileManager.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/HostFileManager.java Wed Mar 26 21:27:33 2014
@@ -105,7 +105,7 @@ public class HostFileManager {
       prefix = entry.substring(0, idx);
       String portStr = entry.substring(idx + 1);
       try {
-        port = Integer.valueOf(portStr);
+        port = Integer.parseInt(portStr);
       } catch (NumberFormatException e) {
         throw new IOException("unable to parse port number for " +
             "'" + entry + "'", e);

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NNStorage.java Wed Mar 26 21:27:33 2014
@@ -786,7 +786,6 @@ public class NNStorage extends Storage i
    * Marks a list of directories as having experienced an error.
    *
    * @param sds A list of storage directories to mark as errored.
-   * @throws IOException
    */
   void reportErrorsOnDirectories(List sds) {
     for (StorageDirectory sd : sds) {
@@ -800,7 +799,6 @@ public class NNStorage extends Storage i
    * available.
    *
    * @param sd A storage directory to mark as errored.
-   * @throws IOException
    */
   private void reportErrorsOnDirectory(StorageDirectory sd) {
     LOG.error("Error reported on storage directory " + sd);

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java Wed Mar 26 21:27:33 2014
@@ -301,7 +301,7 @@ public class CacheAdmin extends Configur
       }
       long id;
       try {
-        id = Long.valueOf(idString);
+        id = Long.parseLong(idString);
       } catch (NumberFormatException e) {
         System.err.println("Invalid directive ID " + idString + ": expected " +
             "a numeric value.");

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/DelimitedImageVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/DelimitedImageVisitor.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/DelimitedImageVisitor.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/DelimitedImageVisitor.java Wed Mar 26 21:27:33 2014
@@ -144,7 +144,7 @@ class DelimitedImageVisitor extends Text
     // Special case of file size, which is sum of the num bytes in each block
     if(element == ImageElement.NUM_BYTES)
-      fileSize += Long.valueOf(value);
+      fileSize += Long.parseLong(value);
 
     if(elements.containsKey(element) && element != ImageElement.NUM_BYTES)
       elements.put(element, value);

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionVisitor.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionVisitor.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FileDistributionVisitor.java Wed Mar 26 21:27:33 2014
@@ -159,10 +159,10 @@ class FileDistributionVisitor extends Te
       current.path = (value.equals("") ? "/" : value);
       break;
     case REPLICATION:
-      current.replication = Integer.valueOf(value);
+      current.replication = Integer.parseInt(value);
       break;
     case NUM_BYTES:
-      current.fileSize += Long.valueOf(value);
+      current.fileSize += Long.parseLong(value);
       break;
     default:
       break;

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsImageVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsImageVisitor.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsImageVisitor.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsImageVisitor.java Wed Mar 26 21:27:33 2014
@@ -135,7 +135,7 @@ class LsImageVisitor extends TextWriterI
       perms = value;
       break;
     case REPLICATION:
-      replication = Integer.valueOf(value);
+      replication = Integer.parseInt(value);
       break;
     case USER_NAME:
       username = value;
@@ -144,7 +144,7 @@ class LsImageVisitor extends TextWriterI
       group = value;
       break;
     case NUM_BYTES:
-      filesize += Long.valueOf(value);
+      filesize += Long.parseLong(value);
       break;
     case MODIFICATION_TIME:
       modTime = value;
@@ -173,6 +173,6 @@ class LsImageVisitor extends TextWriterI
     if(element == ImageElement.INODE)
       newLine();
     else if (element == ImageElement.BLOCKS)
-      numBlocks = Integer.valueOf(value);
+      numBlocks = Integer.parseInt(value);
   }
 }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java Wed Mar 26 21:27:33 2014
@@ -20,7 +20,6 @@ package org.apache.hadoop.hdfs.util;
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
 
@@ -96,7 +95,7 @@ public class PersistentLongFile {
         new BufferedReader(new InputStreamReader(new FileInputStream(
             file), Charsets.UTF_8));
     try {
-      val = Long.valueOf(br.readLine());
+      val = Long.parseLong(br.readLine());
       br.close();
       br = null;
     } finally {

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java Wed Mar 26 21:27:33 2014
@@ -134,7 +134,6 @@ public class HftpFileSystem extends File
    * 3. DFS_NAMENODE_HTTP_PORT_DEFAULT i.e. 50070.
    *
    * @param uri
-   * @return
    */
   protected InetSocketAddress getNamenodeAddr(URI uri) {
     // use authority so user supplied uri can override port
@@ -435,9 +434,9 @@ public class HftpFileSystem extends File
       } catch (ParseException e) { throw new SAXException(e); }
       FileStatus fs = "file".equals(qname) ? new FileStatus(
-          Long.valueOf(attrs.getValue("size")).longValue(), false,
+          Long.parseLong(attrs.getValue("size")), false,
           Short.valueOf(attrs.getValue("replication")).shortValue(),
-          Long.valueOf(attrs.getValue("blocksize")).longValue(),
+          Long.parseLong(attrs.getValue("blocksize")),
           modif, atime, FsPermission.valueOf(attrs.getValue("permission")),
           attrs.getValue("owner"), attrs.getValue("group"),
           HftpFileSystem.this.makeQualified(

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFetchImage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFetchImage.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFetchImage.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFetchImage.java Wed Mar 26 21:27:33 2014
@@ -107,7 +107,7 @@ public class TestFetchImage {
     for (File imageFile : new File(new File(nameDir), "current").listFiles()) {
       Matcher imageMatch = IMAGE_REGEX.matcher(imageFile.getName());
       if (imageMatch.matches()) {
-        long imageTxId = Long.valueOf(imageMatch.group(1));
+        long imageTxId = Long.parseLong(imageMatch.group(1));
         if (imageTxId > highestImageTxId) {
           highestImageTxId = imageTxId;
           highestImageOnNn = imageFile;

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestShortCircuitLocalRead.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestShortCircuitLocalRead.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestShortCircuitLocalRead.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestShortCircuitLocalRead.java Wed Mar 26 21:27:33 2014
@@ -518,7 +518,7 @@ public class TestShortCircuitLocalRead {
     }
     boolean shortcircuit = Boolean.valueOf(args[0]);
     boolean checksum = Boolean.valueOf(args[1]);
-    int threadCount = Integer.valueOf(args[2]);
+    int threadCount = Integer.parseInt(args[2]);
 
     // Setup create a file
     final Configuration conf = new Configuration();

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java Wed Mar 26 21:27:33 2014
@@ -1059,11 +1059,11 @@ public class TestEditLog {
       assertTrue("bad spec: " + logSpec, m.matches());
       if (m.group(2) == null) {
         files.add(NNStorage.getInProgressEditsFileName(
-            Long.valueOf(m.group(1))));
+            Long.parseLong(m.group(1))));
       } else {
         files.add(NNStorage.getFinalizedEditsFileName(
-            Long.valueOf(m.group(1)),
-            Long.valueOf(m.group(2))));
+            Long.parseLong(m.group(1)),
+            Long.parseLong(m.group(2))));
       }
     }
     StorageDirectory sd = FSImageTestUtil.mockStorageDirectory(

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeJspHelper.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeJspHelper.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeJspHelper.java Wed Mar 26 21:27:33 2014
@@ -316,7 +316,7 @@ public class TestNameNodeJspHelper {
       String digitLine = output.substring(matcher.start(), matcher.end())
          .trim();
       assertTrue("assertCounts error. actual != expected",
-          Integer.valueOf(digitLine) == expectedCount);
+          Integer.parseInt(digitLine) == expectedCount);
     } else {
       fail("assertCount matcher error");
     }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/SpotCheckImageVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/SpotCheckImageVisitor.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/SpotCheckImageVisitor.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/SpotCheckImageVisitor.java Wed Mar 26 21:27:33 2014
@@ -43,9 +43,9 @@ class SpotCheckImageVisitor extends Imag
   @Override
   void visit(ImageElement element, String value) throws IOException {
     if(element == ImageElement.NUM_BYTES)
-      current.totalFileSize += Long.valueOf(value);
+      current.totalFileSize += Long.parseLong(value);
     else if (element == ImageElement.REPLICATION)
-      current.totalReplications += Long.valueOf(value);
+      current.totalReplications += Long.parseLong(value);
     else if (element == ImageElement.INODE_PATH)
       current.pathNames.add(value);
   }
@@ -61,7 +61,7 @@ class SpotCheckImageVisitor extends Imag
       current = INUCs;
       break;
     case BLOCKS:
-      current.totalNumBlocks += Long.valueOf(value);
+      current.totalNumBlocks += Long.parseLong(value);
       break;
     // OK to not have a default, we're skipping most of the values
     }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestDelegationTokenRemoteFetcher.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestDelegationTokenRemoteFetcher.java?rev=1582068&r1=1582067&r2=1582068&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestDelegationTokenRemoteFetcher.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestDelegationTokenRemoteFetcher.java Wed Mar 26 21:27:33 2014
@@ -186,7 +186,7 @@ public class TestDelegationTokenRemoteFe
       NumberFormatException, AuthenticationException {
     bootstrap = startHttpServer(httpPort, testToken, serviceUrl);
     assertTrue("testRenewTokenFromHttp error",
-        Long.valueOf(EXP_DATE) == DelegationTokenFetcher.renewDelegationToken(
+        Long.parseLong(EXP_DATE) == DelegationTokenFetcher.renewDelegationToken(
            connectionFactory, serviceUrl, testToken));
    if (assertionError != null)
      throw assertionError;
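As a closing note for readers skimming the FSEditLogOp hunks above: the other half of this cleanup replaces wrapper-then-toString() calls with the static toString helpers. The snippet below is a standalone illustration written for this mail (class and variable names are made up), not code from the Hadoop tree.

    // Standalone sketch of the primitive-to-String half of the cleanup.
    public class ToStringExample {
      public static void main(String[] args) {
        long txid = 1582068L;

        // Old style, as on the '-' lines of the diff: boxes txid into a
        // java.lang.Long only to call toString() on the wrapper.
        String viaWrapper = Long.valueOf(txid).toString();

        // New style, as on the '+' lines: formats the primitive directly.
        String direct = Long.toString(txid);

        System.out.println(viaWrapper.equals(direct)); // prints: true
      }
    }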