hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cdoug...@apache.org
Subject svn commit: r700589 - in /hadoop/core/trunk: ./ src/contrib/streaming/src/java/org/apache/hadoop/streaming/ src/core/org/apache/hadoop/metrics/spi/ src/core/org/apache/hadoop/net/ src/core/org/apache/hadoop/record/ src/hdfs/org/apache/hadoop/hdfs/serve...
Date Tue, 30 Sep 2008 21:49:02 GMT
Author: cdouglas
Date: Tue Sep 30 14:49:02 2008
New Revision: 700589

URL: http://svn.apache.org/viewvc?rev=700589&view=rev
Log:
HADOOP-4204. Fix findbugs warnings related to unused variables, naive
Number subclass instantiation, Map iteration, and badly scoped inner
classes. Contributed by Suresh Srinivas.

Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java
    hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
    hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java
    hadoop/core/trunk/src/core/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
    hadoop/core/trunk/src/core/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java
    hadoop/core/trunk/src/core/org/apache/hadoop/net/NodeBase.java
    hadoop/core/trunk/src/core/org/apache/hadoop/record/XmlRecordInput.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/pipes/PipesNonJavaInputFormat.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=700589&r1=700588&r2=700589&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Tue Sep 30 14:49:02 2008
@@ -37,6 +37,10 @@
 
   BUG FIXES
 
+    HADOOP-4204. Fix findbugs warnings related to unused variables, naive
+    Number subclass instantiation, Map iteration, and badly scoped inner
+    classes. (Suresh Srinivas via cdouglas)
+
 Release 0.19.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java?rev=700589&r1=700588&r2=700589&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java Tue Sep 30 14:49:02 2008
@@ -20,6 +20,7 @@
 
 import java.io.*;
 import java.util.*;
+import java.util.Map.Entry;
 
 /**
  * Maps a relative pathname to an absolute pathname using the
@@ -108,12 +109,9 @@
   private static void printEnvVariables() {
     System.out.println("Environment Variables: ");
     Map<String,String> map = System.getenv();
-    Set<String> keys = map.keySet();
-    Iterator iter = keys.iterator();
-    while(iter.hasNext()) {
-      String thiskey = (String)(iter.next()); 
-      String value = map.get(thiskey);
-      System.out.println(thiskey + " = " + value);
+    Set<Entry<String, String>> entrySet = map.entrySet();
+    for(Entry<String, String> entry : entrySet) {
+      System.out.println(entry.getKey() + " = " + entry.getValue());
     }
   }
 

Modified: hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java?rev=700589&r1=700588&r2=700589&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java Tue Sep 30 14:49:02 2008
@@ -953,7 +953,7 @@
     return 0;
   }
   /** Support -jobconf x=y x1=y1 type options **/
-  class MultiPropertyOption extends PropertyOption{
+  static class MultiPropertyOption extends PropertyOption{
     private String optionString; 
     MultiPropertyOption(){
       super(); 
@@ -1051,7 +1051,7 @@
 
   protected RunningJob running_;
   protected JobID jobId_;
-  protected static String LINK_URI = "You need to specify the uris as hdfs://host:port/#linkname," +
+  protected static final String LINK_URI = "You need to specify the uris as hdfs://host:port/#linkname," +
     "Please specify a different link name for all of your caching URIs";
 
 }

Modified: hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java?rev=700589&r1=700588&r2=700589&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java Tue Sep 30 14:49:02 2008
@@ -128,8 +128,6 @@
                                      DataOutputBuffer outBufOrNull) throws IOException {
     byte[] buf = new byte[Math.max(lookAhead_, maxRecSize_)];
     int read = 0;
-    boolean success = true;
-    long skippedBytes = 0;
     bin_.mark(Math.max(lookAhead_, maxRecSize_) + 2); //mark to invalidate if we read more
     read = bin_.read(buf);
     if (read == -1) return false;
@@ -142,12 +140,9 @@
     int bufPos = 0;
     int state = synched_ ? CDATA_OUT : CDATA_UNK;
     int s = 0;
-    int matchLen = 0;
-    int LL = 120000 * 10;
 
     while (match.find(bufPos)) {
       int input;
-      matchLen = match.group(0).length();
       if (match.group(1) != null) {
         input = CDATA_BEGIN;
       } else if (match.group(2) != null) {
@@ -164,7 +159,6 @@
       }
       state = nextState(state, input, match.start());
       if (state == RECORD_ACCEPT) {
-        bufPos = match.end();
         break;
       }
       bufPos = match.end();
@@ -177,7 +171,11 @@
     if (matched) {
       int endPos = includePat ? firstMatchEnd_ : firstMatchStart_;
       bin_.reset();
-      skippedBytes = bin_.skip(endPos); //Skip succeeds as we have already read this is buffer
+
+      for (long skiplen = endPos; skiplen > 0; ) {
+        skiplen -= bin_.skip(skiplen); // Skip succeeds as we have read this buffer
+      }
+
       pos_ += endPos;
       if (outBufOrNull != null) {
         outBufOrNull.writeBytes(sbuf.substring(0,endPos));
@@ -299,6 +297,5 @@
   int firstMatchStart_ = 0; // candidate record boundary. Might just be CDATA.
   int firstMatchEnd_ = 0;
 
-  boolean isRecordMatch_;
   boolean synched_;
 }

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java?rev=700589&r1=700588&r2=700589&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java Tue Sep 30 14:49:02 2008
@@ -31,6 +31,8 @@
 import java.util.Timer;
 import java.util.TimerTask;
 import java.util.TreeMap;
+import java.util.Map.Entry;
+
 import org.apache.hadoop.metrics.ContextFactory;
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsException;
@@ -298,9 +300,9 @@
     for (String recordName : bufferedData.keySet()) {
       RecordMap recordMap = bufferedData.get(recordName);
       synchronized (recordMap) {
-        for (TagMap tagMap : recordMap.keySet()) {
-          MetricMap metricMap = recordMap.get(tagMap);
-          OutputRecord outRec = new OutputRecord(tagMap, metricMap);
+        Set<Entry<TagMap, MetricMap>> entrySet = recordMap.entrySet ();
+        for (Entry<TagMap, MetricMap> entry : entrySet) {
+          OutputRecord outRec = new OutputRecord(entry.getKey(), entry.getValue());
           emitRecord(contextName, recordName, outRec);
         }
       }
@@ -338,8 +340,11 @@
         TagMap tagMap = new TagMap(tagTable); // clone tags
         recordMap.put(tagMap, metricMap);
       }
-      for (String metricName : metricUpdates.keySet()) {
-        MetricValue updateValue = metricUpdates.get(metricName);
+
+      Set<Entry<String, MetricValue>> entrySet = metricUpdates.entrySet();
+      for (Entry<String, MetricValue> entry : entrySet) {
+        String metricName = entry.getKey ();
+        MetricValue updateValue = entry.getValue ();
         Number updateNumber = updateValue.getNumber();
         Number currentNumber = metricMap.get(metricName);
         if (currentNumber == null || updateValue.isAbsolute()) {
@@ -363,16 +368,16 @@
    */
   private Number sum(Number a, Number b) {
     if (a instanceof Integer) {
-      return new Integer(a.intValue() + b.intValue());
+      return Integer.valueOf(a.intValue() + b.intValue());
     }
     else if (a instanceof Float) {
       return new Float(a.floatValue() + b.floatValue());
     }
     else if (a instanceof Short) {
-      return new Short((short)(a.shortValue() + b.shortValue()));
+      return Short.valueOf((short)(a.shortValue() + b.shortValue()));
     }
     else if (a instanceof Byte) {
-      return new Byte((byte)(a.byteValue() + b.byteValue()));
+      return Byte.valueOf((byte)(a.byteValue() + b.byteValue()));
     }
     else if (a instanceof Long) {
       return Long.valueOf((a.longValue() + b.longValue()));

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java?rev=700589&r1=700588&r2=700589&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java Tue Sep 30 14:49:02 2008
@@ -77,7 +77,7 @@
    * @throws MetricsException if the tagName conflicts with the configuration
    */
   public void setTag(String tagName, int tagValue) {
-    tagTable.put(tagName, new Integer(tagValue));
+    tagTable.put(tagName, Integer.valueOf(tagValue));
   }
     
   /**
@@ -99,7 +99,7 @@
    * @throws MetricsException if the tagName conflicts with the configuration
    */
   public void setTag(String tagName, short tagValue) {
-    tagTable.put(tagName, new Short(tagValue));
+    tagTable.put(tagName, Short.valueOf(tagValue));
   }
     
   /**
@@ -110,7 +110,7 @@
    * @throws MetricsException if the tagName conflicts with the configuration
    */
   public void setTag(String tagName, byte tagValue) {
-    tagTable.put(tagName, new Byte(tagValue));
+    tagTable.put(tagName, Byte.valueOf(tagValue));
   }
     
   /**
@@ -129,7 +129,7 @@
    * conflicts with the configuration
    */
   public void setMetric(String metricName, int metricValue) {
-    setAbsolute(metricName, new Integer(metricValue));
+    setAbsolute(metricName, Integer.valueOf(metricValue));
   }
     
   /**
@@ -153,7 +153,7 @@
    * conflicts with the configuration
    */
   public void setMetric(String metricName, short metricValue) {
-    setAbsolute(metricName, new Short(metricValue));
+    setAbsolute(metricName, Short.valueOf(metricValue));
   }
     
   /**
@@ -165,7 +165,7 @@
    * conflicts with the configuration
    */
   public void setMetric(String metricName, byte metricValue) {
-    setAbsolute(metricName, new Byte(metricValue));
+    setAbsolute(metricName, Byte.valueOf(metricValue));
   }
     
   /**
@@ -189,7 +189,7 @@
    * conflicts with the configuration
    */
   public void incrMetric(String metricName, int metricValue) {
-    setIncrement(metricName, new Integer(metricValue));
+    setIncrement(metricName, Integer.valueOf(metricValue));
   }
     
   /**
@@ -213,7 +213,7 @@
    * conflicts with the configuration
    */
   public void incrMetric(String metricName, short metricValue) {
-    setIncrement(metricName, new Short(metricValue));
+    setIncrement(metricName, Short.valueOf(metricValue));
   }
     
   /**
@@ -225,7 +225,7 @@
    * conflicts with the configuration
    */
   public void incrMetric(String metricName, byte metricValue) {
-    setIncrement(metricName, new Byte(metricValue));
+    setIncrement(metricName, Byte.valueOf(metricValue));
   }
     
   /**

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/net/NodeBase.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/net/NodeBase.java?rev=700589&r1=700588&r2=700589&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/net/NodeBase.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/net/NodeBase.java Tue Sep 30 14:49:02 2008
@@ -23,7 +23,7 @@
 
 public class NodeBase implements Node {
   public final static char PATH_SEPARATOR = '/';
-  public static String PATH_SEPARATOR_STR = "/";
+  public final static String PATH_SEPARATOR_STR = "/";
   public final static String ROOT = ""; // string representation of root
   
   protected String name; //host:port#

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/record/XmlRecordInput.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/record/XmlRecordInput.java?rev=700589&r1=700588&r2=700589&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/record/XmlRecordInput.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/record/XmlRecordInput.java Tue Sep 30 14:49:02 2008
@@ -47,7 +47,7 @@
     public String getType() { return type; }
   }
     
-  private class XMLParser extends DefaultHandler {
+  private static class XMLParser extends DefaultHandler {
     private boolean charsValid = false;
         
     private ArrayList<Value> valList;

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java?rev=700589&r1=700588&r2=700589&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java Tue Sep 30 14:49:02 2008
@@ -50,12 +50,12 @@
 
   static FSNamesystem fsn = null;
   public static InetSocketAddress nameNodeAddr;
-  public static Configuration conf = new Configuration();
+  public static final Configuration conf = new Configuration();
   public static final UnixUserGroupInformation webUGI
   = UnixUserGroupInformation.createImmutable(
       conf.getStrings(WEB_UGI_PROPERTY_NAME));
 
-  public static int defaultChunkSizeToView = 
+  public static final int defaultChunkSizeToView = 
     conf.getInt("dfs.default.chunk.view.size", 32 * 1024);
   static Random rand = new Random();
 

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/pipes/PipesNonJavaInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/pipes/PipesNonJavaInputFormat.java?rev=700589&r1=700588&r2=700589&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/pipes/PipesNonJavaInputFormat.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/pipes/PipesNonJavaInputFormat.java Tue Sep 30 14:49:02 2008
@@ -66,7 +66,7 @@
    * {@link #next(FloatWritable, NullWritable)} with the progress as the
    * <code>key</code>.
    */
-  class PipesDummyRecordReader implements RecordReader<FloatWritable, NullWritable> {
+  static class PipesDummyRecordReader implements RecordReader<FloatWritable, NullWritable> {
     float progress = 0.0f;
     
     public PipesDummyRecordReader(Configuration job, InputSplit split)



Mime
View raw message