hadoop-common-commits mailing list archives

From: z..@apache.org
Subject: [37/50] hadoop git commit: HADOOP-11429. Findbugs warnings in hadoop extras. Contributed by Varun Saxena.
Date: Wed, 24 Dec 2014 19:35:51 GMT
HADOOP-11429. Findbugs warnings in hadoop extras. Contributed by Varun Saxena.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ef84c33e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ef84c33e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ef84c33e

Branch: refs/heads/HDFS-EC
Commit: ef84c33ed40404b09e73d5a30739a2fdf6920f19
Parents: 07db849
Author: Haohui Mai <wheat9@apache.org>
Authored: Sun Dec 21 14:57:34 2014 -0800
Committer: Zhe Zhang <zhezhang@cloudera.com>
Committed: Wed Dec 24 11:22:18 2014 -0800

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +++
 .../java/org/apache/hadoop/tools/DistCh.java    | 23 +++++---------------
 .../java/org/apache/hadoop/tools/DistCpV1.java  | 10 ++++-----
 .../java/org/apache/hadoop/tools/DistTool.java  |  8 +++----
 .../java/org/apache/hadoop/tools/Logalyzer.java |  9 ++++----
 5 files changed, 20 insertions(+), 33 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef84c33e/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index e30c52f..2c1a51d 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -641,6 +641,9 @@ Release 2.7.0 - UNRELEASED
     HADOOP-11431. clean up redundant maven-site-plugin configuration.
     (Herve Boutemy via wheat9)
 
+    HADOOP-11429. Findbugs warnings in hadoop extras.
+    (Varun Saxena via wheat9)
+
 Release 2.6.0 - 2014-11-18
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef84c33e/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCh.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCh.java
b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCh.java
index 8779e06..ed08139 100644
--- a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCh.java
+++ b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCh.java
@@ -238,11 +238,10 @@ public class DistCh extends DistTool {
 
       Text key = new Text();
       FileOperation value = new FileOperation();
-      SequenceFile.Reader in = null;
       long prev = 0L;
       int count = 0; //count src
-      try {
-        for(in = new SequenceFile.Reader(fs, srcs, job); in.next(key, value); ) {
+      try (SequenceFile.Reader in = new SequenceFile.Reader(fs, srcs, job)) {
+        for ( ; in.next(key, value); ) {
           long curr = in.getPosition();
           long delta = curr - prev;
           if (++count > targetcount) {
@@ -252,9 +251,6 @@ public class DistCh extends DistTool {
           }
         }
       }
-      finally {
-        in.close();
-      }
       long remaining = fs.getFileStatus(srcs).getLen() - prev;
       if (remaining != 0) {
         splits.add(new FileSplit(srcs, prev, remaining, (String[])null));
@@ -449,10 +445,8 @@ public class DistCh extends DistTool {
     Path opList = new Path(jobdir, "_" + OP_LIST_LABEL);
     jobconf.set(OP_LIST_LABEL, opList.toString());
     int opCount = 0, synCount = 0;
-    SequenceFile.Writer opWriter = null;
-    try {
-      opWriter = SequenceFile.createWriter(fs, jobconf, opList, Text.class,
-          FileOperation.class, SequenceFile.CompressionType.NONE);
+    try (SequenceFile.Writer opWriter = SequenceFile.createWriter(fs, jobconf, opList, Text.class,
+            FileOperation.class, SequenceFile.CompressionType.NONE)) {
       for(FileOperation op : ops) {
         FileStatus srcstat = fs.getFileStatus(op.src); 
         if (srcstat.isDirectory() && op.isDifferent(srcstat)) {
@@ -479,8 +473,6 @@ public class DistCh extends DistTool {
           }
         }
       }
-    } finally {
-      opWriter.close();
     }
 
     checkDuplication(fs, opList, new Path(jobdir, "_sorted"), jobconf);
@@ -496,9 +488,7 @@ public class DistCh extends DistTool {
     SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
         new Text.Comparator(), Text.class, FileOperation.class, conf);
     sorter.sort(file, sorted);
-    SequenceFile.Reader in = null;
-    try {
-      in = new SequenceFile.Reader(fs, sorted, conf);
+    try (SequenceFile.Reader in = new SequenceFile.Reader(fs, sorted, conf)) {
       FileOperation curop = new FileOperation();
       Text prevsrc = null, cursrc = new Text(); 
       for(; in.next(cursrc, curop); ) {
@@ -512,9 +502,6 @@ public class DistCh extends DistTool {
         curop = new FileOperation();
       }
     }
-    finally {
-      in.close();
-    }
   } 
 
   public static void main(String[] args) throws Exception {
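
The DistCh hunks above all make the same change: the reader or writer used to be
declared null outside the try block and closed unconditionally in a finally block,
so a constructor failure would leave the reference null and the finally block would
then throw a NullPointerException from close(). Try-with-resources only closes a
resource that was actually opened, and it closes it on every exit path. A minimal,
self-contained sketch of the pattern using plain java.io rather than the
SequenceFile API (the file name is hypothetical, not part of this patch):

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.io.IOException;

    public class ReadOps {
      public static void main(String[] args) throws IOException {
        // The reader is closed automatically when the block exits, whether
        // normally or via an exception; if the constructor itself throws,
        // no close() is attempted on an unassigned reference.
        try (BufferedReader in = new BufferedReader(new FileReader("op_list.txt"))) {
          for (String line; (line = in.readLine()) != null; ) {
            System.out.println(line);
          }
        }
      }
    }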

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef84c33e/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
index d1e65e2..6801d6f 100644
--- a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
+++ b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
@@ -24,6 +24,7 @@ import java.io.DataOutput;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.HashSet;
@@ -697,16 +698,13 @@ public class DistCpV1 implements Tool {
       throws IOException {
     List<Path> result = new ArrayList<Path>();
     FileSystem fs = srcList.getFileSystem(conf);
-    BufferedReader input = null;
-    try {
-      input = new BufferedReader(new InputStreamReader(fs.open(srcList)));
+    try (BufferedReader input = new BufferedReader(new InputStreamReader(fs.open(srcList),
+            Charset.forName("UTF-8")))) {
       String line = input.readLine();
       while (line != null) {
         result.add(new Path(line));
         line = input.readLine();
       }
-    } finally {
-      checkAndClose(input);
     }
     return result;
   }
@@ -957,7 +955,7 @@ public class DistCpV1 implements Tool {
             throw new IllegalArgumentException("num_maps not specified in -m");
           }
           try {
-            conf.setInt(MAX_MAPS_LABEL, Integer.valueOf(args[idx]));
+            conf.setInt(MAX_MAPS_LABEL, Integer.parseInt(args[idx]));
           } catch (NumberFormatException e) {
             throw new IllegalArgumentException("Invalid argument to -m: " +
                                                args[idx]);
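
The DistCpV1 hunks fix two different warnings. The InputStreamReader now names its
charset, so the source list is decoded as UTF-8 rather than whatever the platform
default happens to be, and Integer.parseInt replaces Integer.valueOf, which returned
a boxed Integer only to have it unboxed again by conf.setInt. A self-contained sketch
of both (the file name and values are illustrative, not from the patch); the
DistTool.java change below applies the same charset fix to its own file reader:

    import java.io.BufferedReader;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.Charset;

    public class ParseList {
      public static void main(String[] args) throws IOException {
        // Name the encoding explicitly instead of relying on the JVM's
        // platform default, so the file is decoded the same way everywhere.
        try (BufferedReader input = new BufferedReader(
            new InputStreamReader(new FileInputStream("num_maps.txt"),
                Charset.forName("UTF-8")))) {
          for (String line; (line = input.readLine()) != null; ) {
            // parseInt returns a primitive int directly; valueOf would box
            // an Integer that is immediately unboxed again.
            int maps = Integer.parseInt(line.trim());
            System.out.println(maps);
          }
        }
      }
    }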

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef84c33e/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java
b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java
index 4e1a6aa..2c89cb0 100644
--- a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java
+++ b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java
@@ -23,6 +23,7 @@ import java.io.DataOutput;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
@@ -96,14 +97,11 @@ abstract class DistTool implements org.apache.hadoop.util.Tool {
       ) throws IOException {
     List<String> result = new ArrayList<String>();
     FileSystem fs = inputfile.getFileSystem(conf);
-    BufferedReader input = null;
-    try {
-      input = new BufferedReader(new InputStreamReader(fs.open(inputfile)));
+    try (BufferedReader input = new BufferedReader(new InputStreamReader(fs.open(inputfile),
+            Charset.forName("UTF-8")))) {
       for(String line; (line = input.readLine()) != null;) {
         result.add(line);
       }
-    } finally {
-      input.close();
     }
     return result;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef84c33e/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java
b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java
index c3c8e90..050bfbe 100644
--- a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java
+++ b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.tools;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
+import java.nio.charset.Charset;
 import java.util.Random;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -155,15 +156,15 @@ public class Logalyzer {
         
         //Compare column-wise according to *sortSpec*
         for(int i=0; i < sortSpec.length; ++i) {
-          int column = (Integer.valueOf(sortSpec[i]).intValue());
+          int column = Integer.parseInt(sortSpec[i]);
           String c1 = logColumns1[column]; 
           String c2 = logColumns2[column];
           
           //Compare columns
           int comparision = super.compareBytes(
-                                               c1.getBytes(), 0, c1.length(),
-                                               c2.getBytes(), 0, c2.length()
-                                               );
+                                  c1.getBytes(Charset.forName("UTF-8")), 0, c1.length(),
+                                  c2.getBytes(Charset.forName("UTF-8")), 0, c2.length()
+                                  );
           
           //They differ!
           if (comparision != 0) {
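
In Logalyzer, getBytes() with no argument encodes with the JVM's default charset, so
the byte-level sort order produced by the comparator could vary from one machine to
another; passing an explicit charset makes it deterministic. A self-contained sketch
of the idea (the comparator below is a stand-in for WritableComparator.compareBytes,
and the sample values are illustrative, not from the patch):

    import java.nio.charset.Charset;

    public class ByteCompare {
      // Lexicographic comparison of two byte arrays, standing in for the
      // compareBytes call in the hunk above.
      static int compareBytes(byte[] a, byte[] b) {
        int n = Math.min(a.length, b.length);
        for (int i = 0; i < n; i++) {
          int diff = (a[i] & 0xff) - (b[i] & 0xff);
          if (diff != 0) {
            return diff;
          }
        }
        return a.length - b.length;
      }

      public static void main(String[] args) {
        Charset utf8 = Charset.forName("UTF-8");
        // Encoding with an explicit charset keeps the byte-level sort order
        // identical regardless of the JVM's file.encoding setting.
        String c1 = "2014-12-24", c2 = "2014-12-25";
        System.out.println(compareBytes(c1.getBytes(utf8), c2.getBytes(utf8)));
      }
    }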

