hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From whe...@apache.org
Subject hadoop git commit: HADOOP-10482. Fix various findbugs warnings in hadoop-common. Contributed by Haohui Mai.
Date Wed, 10 Dec 2014 20:44:36 GMT
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 a1e4a12dc -> 7e51350d9


HADOOP-10482. Fix various findbugs warnings in hadoop-common. Contributed by Haohui Mai.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7e51350d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7e51350d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7e51350d

Branch: refs/heads/branch-2
Commit: 7e51350d9a4fc68d4b3a778b9265b29a4da74469
Parents: a1e4a12
Author: Haohui Mai <wheat9@apache.org>
Authored: Wed Dec 10 12:44:25 2014 -0800
Committer: Haohui Mai <wheat9@apache.org>
Committed: Wed Dec 10 12:44:32 2014 -0800

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |  2 ++
 .../dev-support/findbugsExcludeFile.xml         | 10 +++++++
 .../org/apache/hadoop/conf/Configuration.java   |  7 ++---
 .../hadoop/crypto/key/JavaKeyStoreProvider.java |  9 ++----
 .../java/org/apache/hadoop/fs/FileSystem.java   |  3 --
 .../org/apache/hadoop/fs/HarFileSystem.java     | 23 +++------------
 .../org/apache/hadoop/fs/LocalDirAllocator.java |  2 +-
 .../hadoop/fs/MD5MD5CRC32FileChecksum.java      |  8 +++---
 .../org/apache/hadoop/fs/ftp/FTPFileSystem.java | 21 ++++++--------
 .../java/org/apache/hadoop/fs/shell/Ls.java     |  2 +-
 .../java/org/apache/hadoop/fs/shell/Stat.java   |  4 +--
 .../java/org/apache/hadoop/fs/shell/Test.java   |  2 ++
 .../main/java/org/apache/hadoop/ha/HAAdmin.java |  6 ----
 .../org/apache/hadoop/ha/SshFenceByTcpPort.java |  2 ++
 .../java/org/apache/hadoop/io/LongWritable.java |  4 +--
 .../main/java/org/apache/hadoop/io/Text.java    |  2 ++
 .../hadoop/io/compress/DecompressorStream.java  |  2 +-
 .../main/java/org/apache/hadoop/ipc/Server.java | 15 +++++-----
 .../metrics/ganglia/GangliaContext31.java       |  5 ----
 .../hadoop/metrics/spi/CompositeContext.java    |  2 +-
 .../hadoop/metrics2/lib/MutableCounterInt.java  | 17 +++++------
 .../hadoop/metrics2/lib/MutableCounterLong.java | 19 +++++++------
 .../hadoop/metrics2/lib/MutableGaugeInt.java    | 30 ++++++++++----------
 .../hadoop/metrics2/lib/MutableGaugeLong.java   | 30 ++++++++++----------
 .../java/org/apache/hadoop/net/NetUtils.java    |  2 +-
 .../net/ScriptBasedMappingWithDependency.java   |  3 +-
 .../hadoop/security/LdapGroupsMapping.java      |  6 +---
 .../apache/hadoop/util/ComparableVersion.java   |  2 ++
 .../apache/hadoop/util/PrintJarMainClass.java   | 17 +++++------
 .../org/apache/hadoop/util/ServletUtil.java     |  6 ++--
 30 files changed, 118 insertions(+), 145 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index d02a1e0..87af173 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -192,6 +192,8 @@ Release 2.7.0 - UNRELEASED
     HADOOP-11381. Fix findbugs warnings in hadoop-distcp, hadoop-aws,
     hadoop-azure, and hadoop-openstack. (Li Lu via wheat9)
 
+    HADOOP-10482. Fix various findbugs warnings in hadoop-common. (wheat9)
+
 Release 2.6.0 - 2014-11-18
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
index d14ffd6..e03d3b9 100644
--- a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
@@ -259,6 +259,16 @@
        <Method name="writeVLong" />
        <Bug pattern="SF_SWITCH_FALLTHROUGH" />
      </Match>
+     <Match>
+       <Class name="org.apache.hadoop.io.Text" />
+       <Method name="bytesToCodePoint" />
+       <Bug pattern="SF_SWITCH_NO_DEFAULT" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.util.PureJavaCrc32C" />
+       <Method name="update" />
+       <Bug pattern="SF_SWITCH_NO_DEFAULT" />
+     </Match>
     <!--
 	  The switch condition fall through is intentional and for performance
 	  purposes.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 1276865..a48ba17 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -1481,11 +1481,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param pattern new value
    */
   public void setPattern(String name, Pattern pattern) {
-    if (null == pattern) {
-      set(name, null);
-    } else {
-      set(name, pattern.pattern());
-    }
+    assert pattern != null : "Pattern cannot be null";
+    set(name, pattern.pattern());
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
index 75981c4..a9738ba 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
@@ -144,13 +144,8 @@ public class JavaKeyStoreProvider extends KeyProvider {
           // Provided Password file does not exist
           throw new IOException("Password file does not exists");
         }
-        if (pwdFile != null) {
-          InputStream is = pwdFile.openStream();
-          try {
-            password = IOUtils.toString(is).trim().toCharArray();
-          } finally {
-            is.close();
-          }
+        try (InputStream is = pwdFile.openStream()) {
+          password = IOUtils.toString(is).trim().toCharArray();
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 745238c..5923aa8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -2619,9 +2619,6 @@ public abstract class FileSystem extends Configured implements Closeable
{
   private static FileSystem createFileSystem(URI uri, Configuration conf
       ) throws IOException {
     Class<?> clazz = getFileSystemClass(uri.getScheme(), conf);
-    if (clazz == null) {
-      throw new IOException("No FileSystem for scheme: " + uri.getScheme());
-    }
     FileSystem fs = (FileSystem)ReflectionUtils.newInstance(clazz, conf);
     fs.initialize(uri, conf);
     return fs;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
index 3ba6de1..0fba268 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
@@ -220,12 +220,7 @@ public class HarFileSystem extends FileSystem {
       return FileSystem.getDefaultUri(conf);
     }
     String authority = rawURI.getAuthority();
-    if (authority == null) {
-      throw new IOException("URI: " + rawURI
-          + " is an invalid Har URI since authority==null."
-          + "  Expecting har://<scheme>-<host>/<path>.");
-    }
- 
+
     int i = authority.indexOf('-');
     if (i < 0) {
       throw new IOException("URI: " + rawURI
@@ -489,19 +484,12 @@ public class HarFileSystem extends FileSystem {
   }
   
   static class Store {
-    public Store() {
-      begin = end = startHash = endHash = 0;
-    }
-    public Store(long begin, long end, int startHash, int endHash) {
+    public Store(long begin, long end) {
       this.begin = begin;
       this.end = end;
-      this.startHash = startHash;
-      this.endHash = endHash;
     }
     public long begin;
     public long end;
-    public int startHash;
-    public int endHash;
   }
   
   /**
@@ -594,7 +582,7 @@ public class HarFileSystem extends FileSystem {
     public HarStatus(String harString) throws UnsupportedEncodingException {
       String[] splits = harString.split(" ");
       this.name = decodeFileName(splits[0]);
-      this.isDir = "dir".equals(splits[1]) ? true: false;
+      this.isDir = "dir".equals(splits[1]);
       // this is equal to "none" if its a directory
       this.partName = splits[2];
       this.startIndex = Long.parseLong(splits[3]);
@@ -1167,11 +1155,8 @@ public class HarFileSystem extends FileSystem {
           int b = lin.readLine(line);
           read += b;
           readStr = line.toString().split(" ");
-          int startHash = Integer.parseInt(readStr[0]);
-          int endHash  = Integer.parseInt(readStr[1]);
           stores.add(new Store(Long.parseLong(readStr[2]), 
-              Long.parseLong(readStr[3]), startHash,
-              endHash));
+              Long.parseLong(readStr[3])));
           line.clear();
         }
       } catch (IOException ioe) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
index 7748ac5..6fb34e3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
@@ -369,7 +369,7 @@ public class LocalDirAllocator {
         // Keep rolling the wheel till we get a valid path
         Random r = new java.util.Random();
         while (numDirsSearched < numDirs && returnPath == null) {
-          long randomPosition = Math.abs(r.nextLong()) % totalAvailable;
+          long randomPosition = (r.nextLong() >>> 1) % totalAvailable;
           int dir = 0;
           while (randomPosition > availableOnDisk[dir]) {
             randomPosition -= availableOnDisk[dir];

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
index 5f1fc01..4da7851 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
@@ -143,13 +143,13 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
       switch (finalCrcType) {
         case CRC32:
           return new MD5MD5CRC32GzipFileChecksum(
-              Integer.valueOf(bytesPerCRC),
-              Integer.valueOf(crcPerBlock),
+              Integer.parseInt(bytesPerCRC),
+              Integer.parseInt(crcPerBlock),
               new MD5Hash(md5));
         case CRC32C:
           return new MD5MD5CRC32CastagnoliFileChecksum(
-              Integer.valueOf(bytesPerCRC),
-              Integer.valueOf(crcPerBlock),
+              Integer.parseInt(bytesPerCRC),
+              Integer.parseInt(crcPerBlock),
               new MD5Hash(md5));
         default:
           // we should never get here since finalCrcType will

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
index 9d36bcf..47fb25c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
@@ -23,6 +23,7 @@ import java.io.InputStream;
 import java.net.ConnectException;
 import java.net.URI;
 
+import com.google.common.base.Preconditions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.net.ftp.FTP;
@@ -101,17 +102,12 @@ public class FTPFileSystem extends FileSystem {
     if (userAndPassword == null) {
       userAndPassword = (conf.get("fs.ftp.user." + host, null) + ":" + conf
           .get("fs.ftp.password." + host, null));
-      if (userAndPassword == null) {
-        throw new IOException("Invalid user/passsword specified");
-      }
     }
     String[] userPasswdInfo = userAndPassword.split(":");
+    Preconditions.checkState(userPasswdInfo.length > 1,
+                             "Invalid username / password");
     conf.set(FS_FTP_USER_PREFIX + host, userPasswdInfo[0]);
-    if (userPasswdInfo.length > 1) {
-      conf.set(FS_FTP_PASSWORD_PREFIX + host, userPasswdInfo[1]);
-    } else {
-      conf.set(FS_FTP_PASSWORD_PREFIX + host, null);
-    }
+    conf.set(FS_FTP_PASSWORD_PREFIX + host, userPasswdInfo[1]);
     setConf(conf);
     this.uri = uri;
   }
@@ -293,7 +289,8 @@ public class FTPFileSystem extends FileSystem {
    */
   private boolean exists(FTPClient client, Path file) throws IOException {
     try {
-      return getFileStatus(client, file) != null;
+      getFileStatus(client, file);
+      return true;
     } catch (FileNotFoundException fnfe) {
       return false;
     }
@@ -333,10 +330,8 @@ public class FTPFileSystem extends FileSystem {
     if (dirEntries != null && dirEntries.length > 0 && !(recursive)) {
       throw new IOException("Directory: " + file + " is not empty.");
     }
-    if (dirEntries != null) {
-      for (int i = 0; i < dirEntries.length; i++) {
-        delete(client, new Path(absolute, dirEntries[i].getPath()), recursive);
-      }
+    for (FileStatus dirEntry : dirEntries) {
+      delete(client, new Path(absolute, dirEntry.getPath()), recursive);
     }
     return client.removeDirectory(pathName);
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
index 6024d88..c7e80b6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
@@ -57,7 +57,7 @@ class Ls extends FsCommand {
 		  
   
 
-  protected static final SimpleDateFormat dateFormat = 
+  protected final SimpleDateFormat dateFormat =
     new SimpleDateFormat("yyyy-MM-dd HH:mm");
 
   protected int maxRepl = 3, maxLen = 10, maxOwner = 0, maxGroup = 0;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Stat.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Stat.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Stat.java
index ee56fe6..6365294 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Stat.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Stat.java
@@ -55,8 +55,8 @@ class Stat extends FsCommand {
     "in the specified format. Format accepts filesize in blocks (%b), group name of owner(%g),
" +
     "filename (%n), block size (%o), replication (%r), user name of owner(%u), modification
date (%y, %Y)\n";
 
-  protected static final SimpleDateFormat timeFmt;
-  static {
+  protected final SimpleDateFormat timeFmt;
+  {
     timeFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
     timeFmt.setTimeZone(TimeZone.getTimeZone("UTC"));
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
index 4cfdb08..9984cf2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
@@ -83,6 +83,8 @@ class Test extends FsCommand {
       case 'z':
         test = (item.stat.getLen() == 0);
         break;
+      default:
+        break;
     }
     if (!test) exitCode = 1;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
index bd6366c..a6c4a42 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
@@ -168,12 +168,6 @@ public abstract class HAAdmin extends Configured implements Tool {
   private boolean isOtherTargetNodeActive(String targetNodeToActivate, boolean forceActive)
       throws IOException  {
     Collection<String> targetIds = getTargetIds(targetNodeToActivate);
-    if(targetIds == null) {
-      errOut.println("transitionToActive: No target node in the "
-          + "current configuration");
-      printUsage(errOut, "-transitionToActive");
-      return true;
-    }
     targetIds.remove(targetNodeToActivate);
     for(String targetId : targetIds) {
       HAServiceTarget target = resolveTarget(targetId);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
index 0f54651..90eb915 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
@@ -310,6 +310,8 @@ public class SshFenceByTcpPort extends Configured
       case com.jcraft.jsch.Logger.FATAL:
         LOG.fatal(message);
         break;
+      default:
+        break;
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
index 6dec4aa..b77ca67 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
@@ -99,11 +99,11 @@ public class LongWritable implements WritableComparable<LongWritable>
{
     
     @Override
     public int compare(WritableComparable a, WritableComparable b) {
-      return -super.compare(a, b);
+      return super.compare(b, a);
     }
     @Override
     public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
-      return -super.compare(b1, s1, l1, b2, s2, l2);
+      return super.compare(b2, s2, l2, b1, s1, l1);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
index 3dc5076..0bcaee3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
@@ -584,6 +584,8 @@ public class Text extends BinaryComparable
           state = TRAIL_BYTE;
         }
         break;
+      default:
+        break;
       } // switch (state)
       count++;
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
index 16e0ad7..6bee6b84 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
@@ -40,7 +40,7 @@ public class DecompressorStream extends CompressionInputStream {
   throws IOException {
     super(in);
 
-    if (in == null || decompressor == null) {
+    if (decompressor == null) {
       throw new NullPointerException();
     } else if (bufferSize <= 0) {
       throw new IllegalArgumentException("Illegal bufferSize");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index 127f22b..2c76cdb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -662,7 +662,7 @@ public abstract class Server {
         assert !running;
         readSelector.wakeup();
         try {
-          join();
+          super.join();
         } catch (InterruptedException ie) {
           Thread.currentThread().interrupt();
         }
@@ -1116,7 +1116,8 @@ public abstract class Server {
     private ByteBuffer data;
     private ByteBuffer dataLengthBuffer;
     private LinkedList<Call> responseQueue;
-    private volatile int rpcCount = 0; // number of outstanding rpcs
+    // number of outstanding rpcs
+    private AtomicInteger rpcCount = new AtomicInteger();
     private long lastContact;
     private int dataLength;
     private Socket socket;
@@ -1201,17 +1202,17 @@ public abstract class Server {
 
     /* Return true if the connection has no outstanding rpc */
     private boolean isIdle() {
-      return rpcCount == 0;
+      return rpcCount.get() == 0;
     }
     
     /* Decrement the outstanding RPC count */
     private void decRpcCount() {
-      rpcCount--;
+      rpcCount.decrementAndGet();
     }
     
     /* Increment the outstanding RPC count */
     private void incRpcCount() {
-      rpcCount++;
+      rpcCount.incrementAndGet();
     }
     
     private UserGroupInformation getAuthorizedUgi(String authorizedId)
@@ -1982,9 +1983,9 @@ public abstract class Server {
         LOG.debug("Ignoring socket shutdown exception", e);
       }
       if (channel.isOpen()) {
-        try {channel.close();} catch(Exception e) {}
+        IOUtils.cleanup(null, channel);
       }
-      try {socket.close();} catch(Exception e) {}
+      IOUtils.cleanup(null, socket);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java
index 56444d2..2baa180 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java
@@ -84,11 +84,6 @@ public class GangliaContext31 extends GangliaContext {
       value + " from hostname" + hostName);
 
     String units = getUnits(name);
-    if (units == null) {
-      LOG.warn("Metric name " + name + ", value " + value
-        + " had 'null' units");
-      units = "";
-    }
     int slope = getSlope(name);
     int tmax = getTmax(name);
     int dmax = getDmax(name);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java
index b78d8a5..53d075d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java
@@ -54,7 +54,7 @@ public class CompositeContext extends AbstractMetricsContext {
     int nKids;
     try {
       String sKids = getAttribute(ARITY_LABEL);
-      nKids = Integer.valueOf(sKids);
+      nKids = Integer.parseInt(sKids);
     } catch (Exception e) {
       LOG.error("Unable to initialize composite metric " + contextName +
                 ": could not init arity", e);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterInt.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterInt.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterInt.java
index b0fb0d4..77139e2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterInt.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterInt.java
@@ -23,23 +23,24 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 
+import java.util.concurrent.atomic.AtomicInteger;
+
 /**
  * A mutable int counter for implementing metrics sources
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class MutableCounterInt extends MutableCounter {
-  private volatile int value;
+  private AtomicInteger value = new AtomicInteger();
 
   MutableCounterInt(MetricsInfo info, int initValue) {
     super(info);
-    this.value = initValue;
+    this.value.set(initValue);
   }
 
   @Override
-  public synchronized void incr() {
-    ++value;
-    setChanged();
+  public void incr() {
+    incr(1);
   }
 
   /**
@@ -47,18 +48,18 @@ public class MutableCounterInt extends MutableCounter {
    * @param delta of the increment
    */
   public synchronized void incr(int delta) {
-    value += delta;
+    value.addAndGet(delta);
     setChanged();
   }
 
   public int value() {
-    return value;
+    return value.get();
   }
 
   @Override
   public void snapshot(MetricsRecordBuilder builder, boolean all) {
     if (all || changed()) {
-      builder.addCounter(info(), value);
+      builder.addCounter(info(), value());
       clearChanged();
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterLong.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterLong.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterLong.java
index 43ea490..03a6043 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterLong.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterLong.java
@@ -23,6 +23,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 
+import java.util.concurrent.atomic.AtomicLong;
+
 /**
  * A mutable long counter
  */
@@ -30,36 +32,35 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 @InterfaceStability.Evolving
 public class MutableCounterLong extends MutableCounter {
 
-  private volatile long value;
+  private AtomicLong value = new AtomicLong();
 
   MutableCounterLong(MetricsInfo info, long initValue) {
     super(info);
-    this.value = initValue;
+    this.value.set(initValue);
   }
 
   @Override
-  public synchronized void incr() {
-    ++value;
-    setChanged();
+  public void incr() {
+    incr(1);
   }
 
   /**
    * Increment the value by a delta
    * @param delta the amount by which to increment
    */
-  public synchronized void incr(long delta) {
-    value += delta;
+  public void incr(long delta) {
+    value.addAndGet(delta);
     setChanged();
   }
 
   public long value() {
-    return value;
+    return value.get();
   }
 
   @Override
   public void snapshot(MetricsRecordBuilder builder, boolean all) {
     if (all || changed()) {
-      builder.addCounter(info(), value);
+      builder.addCounter(info(), value());
       clearChanged();
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeInt.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeInt.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeInt.java
index 1a3a15c..cce4528 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeInt.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeInt.java
@@ -23,6 +23,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 
+import java.util.concurrent.atomic.AtomicInteger;
+
 /**
  * A mutable int gauge
  */
@@ -30,44 +32,42 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 @InterfaceStability.Evolving
 public class MutableGaugeInt extends MutableGauge {
 
-  private volatile int value;
+  private AtomicInteger value = new AtomicInteger();
 
   MutableGaugeInt(MetricsInfo info, int initValue) {
     super(info);
-    this.value = initValue;
+    this.value.set(initValue);
   }
 
   public int value() {
-    return value;
+    return value.get();
   }
 
   @Override
-  public synchronized void incr() {
-    ++value;
-    setChanged();
+  public void incr() {
+    incr(1);
   }
 
   /**
    * Increment by delta
    * @param delta the amount by which to increment
    */
-  public synchronized void incr(int delta) {
-    value += delta;
+  public void incr(int delta) {
+    value.addAndGet(delta);
     setChanged();
   }
 
   @Override
-  public synchronized void decr() {
-    --value;
-    setChanged();
+  public void decr() {
+    decr(1);
   }
 
   /**
    * Decrement by delta
    * @param delta the amount by which to decrement
    */
-  public synchronized void decr(int delta) {
-    value -= delta;
+  public void decr(int delta) {
+    value.addAndGet(-delta);
     setChanged();
   }
 
@@ -76,14 +76,14 @@ public class MutableGaugeInt extends MutableGauge {
    * @param value to set
    */
   public void set(int value) {
-    this.value = value;
+    this.value.set(value);
     setChanged();
   }
 
   @Override
   public void snapshot(MetricsRecordBuilder builder, boolean all) {
     if (all || changed()) {
-      builder.addGauge(info(), value);
+      builder.addGauge(info(), value());
       clearChanged();
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeLong.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeLong.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeLong.java
index 61be2a9..cf2fc45 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeLong.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeLong.java
@@ -23,6 +23,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 
+import java.util.concurrent.atomic.AtomicLong;
+
 /**
  * A mutable long gauge
  */
@@ -30,44 +32,42 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 @InterfaceStability.Evolving
 public class MutableGaugeLong extends MutableGauge {
 
-  private volatile long value;
+  private AtomicLong value = new AtomicLong();
 
   MutableGaugeLong(MetricsInfo info, long initValue) {
     super(info);
-    this.value = initValue;
+    this.value.set(initValue);
   }
 
   public long value() {
-    return value;
+    return value.get();
   }
 
   @Override
-  public synchronized void incr() {
-    ++value;
-    setChanged();
+  public void incr() {
+    incr(1);
   }
 
   /**
    * Increment by delta
    * @param delta the amount by which to increment
    */
-  public synchronized void incr(long delta) {
-    value += delta;
+  public void incr(long delta) {
+    value.addAndGet(delta);
     setChanged();
   }
 
   @Override
-  public synchronized void decr() {
-    --value;
-    setChanged();
+  public void decr() {
+    decr(1);
   }
 
   /**
    * Decrement by delta
    * @param delta the amount by which to decrement
    */
-  public synchronized void decr(long delta) {
-    value -= delta;
+  public void decr(long delta) {
+    value.addAndGet(-delta);
     setChanged();
   }
 
@@ -76,13 +76,13 @@ public class MutableGaugeLong extends MutableGauge {
    * @param value to set
    */
   public void set(long value) {
-    this.value = value;
+    this.value.set(value);
     setChanged();
   }
 
   public void snapshot(MetricsRecordBuilder builder, boolean all) {
     if (all || changed()) {
-      builder.addGauge(info(), value);
+      builder.addGauge(info(), value());
       clearChanged();
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
index b535dda..ffee402 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
@@ -287,7 +287,7 @@ public class NetUtils {
     if (fqHost == null) {
       try {
         fqHost = SecurityUtil.getByName(host).getHostName();
-        // slight race condition, but won't hurt 
+        // slight race condition, but won't hurt
         canonicalizedHostCache.put(host, fqHost);
       } catch (UnknownHostException e) {
         fqHost = host;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
index 8a0a003..086650b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
@@ -171,8 +171,7 @@ public class ScriptBasedMappingWithDependency  extends ScriptBasedMapping
 
     @Override
     public String toString() {
-      return super.toString() + ", " + dependencyScriptName != null ?
-          ("dependency script " + dependencyScriptName) : NO_SCRIPT;
+      return "dependency script " + dependencyScriptName;
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
index 76f5380..e72d988 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
@@ -367,10 +367,8 @@ public class LdapGroupsMapping
       return "";
     }
     
-    Reader reader = null;
-    try {
+    try (Reader reader = new FileReader(pwFile)) {
       StringBuilder password = new StringBuilder();
-      reader = new FileReader(pwFile);
       int c = reader.read();
       while (c > -1) {
         password.append((char)c);
@@ -379,8 +377,6 @@ public class LdapGroupsMapping
       return password.toString().trim();
     } catch (IOException ioe) {
       throw new RuntimeException("Could not read password file: " + pwFile, ioe);
-    } finally {
-      IOUtils.cleanup(LOG, reader);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
index a57342f..ebe46d8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
@@ -195,6 +195,8 @@ public class ComparableVersion
                     case 'm':
                         value = "milestone";
                         break;
+                    default:
+                        break;
                 }
             }
             this.value = ALIASES.getProperty( value , value );

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java
index efa4de3..df571f3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java
@@ -34,16 +34,13 @@ public class PrintJarMainClass {
    * @param args
    */
   public static void main(String[] args) {
-    try {
-      JarFile jar_file = new JarFile(args[0]);
-      if (jar_file != null) {
-        Manifest manifest = jar_file.getManifest();
-        if (manifest != null) {
-          String value = manifest.getMainAttributes().getValue("Main-Class");
-          if (value != null) {
-            System.out.println(value.replaceAll("/", "."));
-            return;
-          }
+    try (JarFile jar_file = new JarFile(args[0])) {
+      Manifest manifest = jar_file.getManifest();
+      if (manifest != null) {
+        String value = manifest.getMainAttributes().getValue("Main-Class");
+        if (value != null) {
+          System.out.println(value.replaceAll("/", "."));
+          return;
         }
       }
     } catch (Throwable e) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e51350d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java
index a401f3f..48624cc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java
@@ -72,14 +72,14 @@ public class ServletUtil {
       throw new IOException("Invalid request has no " + param + " parameter");
     }
     
-    return Long.valueOf(paramStr);
+    return Long.parseLong(paramStr);
   }
 
   public static final String HTML_TAIL = "<hr />\n"
-    + "<a href='http://hadoop.apache.org/core'>Hadoop</a>, " 
+    + "<a href='http://hadoop.apache.org/core'>Hadoop</a>, "
     + Calendar.getInstance().get(Calendar.YEAR) + ".\n"
     + "</body></html>";
-  
+
   /**
    * HTML footer to be added in the jsps.
    * @return the HTML footer.


Mime
View raw message