hadoop-common-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r1399950 [11/17] - in /hadoop/common/branches/HDFS-2802/hadoop-common-project: hadoop-annotations/ hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/ hadoop-auth-examples/ hadoop-auth/ hadoop-auth/src/main/java/org/apa...
Date: Fri, 19 Oct 2012 02:27:38 GMT
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java Fri Oct 19 02:25:55 2012
@@ -60,6 +60,7 @@ import org.apache.hadoop.metrics2.lib.Me
 import org.apache.hadoop.metrics2.lib.MetricsSourceBuilder;
 import org.apache.hadoop.metrics2.lib.MutableStat;
 import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.util.Time;
 
 /**
  * A base class for metrics system singletons
@@ -234,11 +235,9 @@ public class MetricsSystemImpl extends M
   void registerSource(String name, String desc, MetricsSource source) {
     checkNotNull(config, "config");
     MetricsConfig conf = sourceConfigs.get(name);
-    MetricsSourceAdapter sa = conf != null
-        ? new MetricsSourceAdapter(prefix, name, desc, source,
-                                   injectedTags, period, conf)
-        : new MetricsSourceAdapter(prefix, name, desc, source,
-          injectedTags, period, config.subset(SOURCE_KEY));
+    MetricsSourceAdapter sa = new MetricsSourceAdapter(prefix, name, desc,
+        source, injectedTags, period, conf != null ? conf
+            : config.subset(SOURCE_KEY));
     sources.put(name, sa);
     sa.start();
     LOG.debug("Registered source "+ name);
@@ -286,8 +285,7 @@ public class MetricsSystemImpl extends M
               throws Throwable {
             try {
               return method.invoke(callback, args);
-            }
-            catch (Exception e) {
+            } catch (Exception e) {
               // These are not considered fatal.
               LOG.warn("Caught exception in callback "+ method.getName(), e);
             }
@@ -331,11 +329,11 @@ public class MetricsSystemImpl extends M
     long millis = period * 1000;
     timer = new Timer("Timer for '"+ prefix +"' metrics system", true);
     timer.scheduleAtFixedRate(new TimerTask() {
+          @Override
           public void run() {
             try {
               onTimerEvent();
-            }
-            catch (Exception e) {
+            } catch (Exception e) {
               LOG.warn(e);
             }
           }
@@ -372,10 +370,10 @@ public class MetricsSystemImpl extends M
 
   private void snapshotMetrics(MetricsSourceAdapter sa,
                                MetricsBufferBuilder bufferBuilder) {
-    long startTime = System.currentTimeMillis();
+    long startTime = Time.now();
     bufferBuilder.add(sa.name(), sa.getMetrics(collector, false));
     collector.clear();
-    snapshotStat.add(System.currentTimeMillis() - startTime);
+    snapshotStat.add(Time.now() - startTime);
     LOG.debug("Snapshotted source "+ sa.name());
   }
 
@@ -386,9 +384,9 @@ public class MetricsSystemImpl extends M
   synchronized void publishMetrics(MetricsBuffer buffer) {
     int dropped = 0;
     for (MetricsSinkAdapter sa : sinks.values()) {
-      long startTime = System.currentTimeMillis();
+      long startTime = Time.now();
       dropped += sa.putMetrics(buffer, logicalTime) ? 0 : 1;
-      publishStat.add(System.currentTimeMillis() - startTime);
+      publishStat.add(Time.now() - startTime);
     }
     droppedPubAll.incr(dropped);
   }
@@ -450,8 +448,7 @@ public class MetricsSystemImpl extends M
             conf.getString(DESC_KEY, sinkName), conf);
         sa.start();
         sinks.put(sinkName, sa);
-      }
-      catch (Exception e) {
+      } catch (Exception e) {
         LOG.warn("Error creating sink '"+ sinkName +"'", e);
       }
     }
@@ -493,8 +490,7 @@ public class MetricsSystemImpl extends M
   static String getHostname() {
     try {
       return InetAddress.getLocalHost().getHostName();
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       LOG.error("Error getting localhost name. Using 'localhost'...", e);
     }
     return "localhost";
@@ -554,6 +550,7 @@ public class MetricsSystemImpl extends M
     return true;
   }
 
+  @Override
   public MetricsSource getSource(String name) {
     return allSources.get(name);
   }

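For context, the timing hunks above replace System.currentTimeMillis() with the
newly imported org.apache.hadoop.util.Time. A minimal sketch of the resulting
elapsed-time pattern inside MetricsSystemImpl (doWork() is a placeholder, not
part of this patch):

    long startTime = Time.now();               // wall-clock milliseconds, same value as before
    doWork();                                  // e.g. the snapshot or publish step being timed
    snapshotStat.add(Time.now() - startTime);  // record the elapsed time in the MutableStat
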
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java Fri Oct 19 02:25:55 2012
@@ -27,8 +27,13 @@ import org.apache.hadoop.metrics2.Metric
 import org.apache.hadoop.metrics2.MetricsSystem;
 import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
- * The default metrics system singleton
+ * The default metrics system singleton. This class is used by all the daemon
+ * processes (such as NameNode, DataNode, JobTracker, etc.). During daemon
+ * process initialization the processes call {@link DefaultMetricsSystem#init(String)}
+ * to initialize the {@link MetricsSystem}.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
@@ -37,7 +42,10 @@ public enum DefaultMetricsSystem {
 
   private AtomicReference<MetricsSystem> impl =
       new AtomicReference<MetricsSystem>(new MetricsSystemImpl());
+  
+  @VisibleForTesting
   volatile boolean miniClusterMode = false;
+  
   final UniqueNames mBeanNames = new UniqueNames();
   final UniqueNames sourceNames = new UniqueNames();
 
@@ -87,12 +95,12 @@ public enum DefaultMetricsSystem {
 
   MetricsSystem getImpl() { return impl.get(); }
 
-  @InterfaceAudience.Private
+  @VisibleForTesting
   public static void setMiniClusterMode(boolean choice) {
     INSTANCE.miniClusterMode = choice;
   }
 
-  @InterfaceAudience.Private
+  @VisibleForTesting
   public static boolean inMiniClusterMode() {
     return INSTANCE.miniClusterMode;
   }

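The expanded class javadoc above documents that daemon processes initialize the
metrics system at startup. A hedged sketch of that wiring, assuming the usual
static entry point DefaultMetricsSystem.initialize(String); the prefix and the
MySource class are illustrative, not part of this patch:

    MetricsSystem ms = DefaultMetricsSystem.initialize("NameNode");       // prefix names the daemon
    ms.register("MySource", "An example metrics source", new MySource()); // register a source with it
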
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java Fri Oct 19 02:25:55 2012
@@ -76,8 +76,7 @@ class MethodMetric extends MutableMetric
             Object ret = method.invoke(obj, (Object[])null);
             if (isInt(type)) rb.addCounter(info, ((Integer) ret).intValue());
             else rb.addCounter(info, ((Long) ret).longValue());
-          }
-          catch (Exception ex) {
+          } catch (Exception ex) {
             LOG.error("Error invoking method "+ method.getName(), ex);
           }
         }
@@ -113,8 +112,7 @@ class MethodMetric extends MutableMetric
             else if (isLong(t)) rb.addGauge(info, ((Long) ret).longValue());
             else if (isFloat(t)) rb.addGauge(info, ((Float) ret).floatValue());
             else rb.addGauge(info, ((Double) ret).doubleValue());
-          }
-          catch (Exception ex) {
+          } catch (Exception ex) {
             LOG.error("Error invoking method "+ method.getName(), ex);
           }
         }
@@ -130,8 +128,7 @@ class MethodMetric extends MutableMetric
           try {
             Object ret = method.invoke(obj, (Object[]) null);
             rb.tag(info, (String) ret);
-          }
-          catch (Exception ex) {
+          } catch (Exception ex) {
             LOG.error("Error invoking method "+ method.getName(), ex);
           }
         }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java Fri Oct 19 02:25:55 2012
@@ -181,6 +181,24 @@ public class MetricsRegistry {
   }
 
   /**
+   * Create a mutable metric that estimates quantiles of a stream of values
+   * @param name of the metric
+   * @param desc metric description
+   * @param sampleName of the metric (e.g., "Ops")
+   * @param valueName of the metric (e.g., "Time" or "Latency")
+   * @param interval rollover interval of estimator in seconds
+   * @return a new quantile estimator object
+   */
+  public synchronized MutableQuantiles newQuantiles(String name, String desc,
+      String sampleName, String valueName, int interval) {
+    checkMetricName(name);
+    MutableQuantiles ret = 
+        new MutableQuantiles(name, desc, sampleName, valueName, interval);
+    metricsMap.put(name, ret);
+    return ret;
+  }
+  
+  /**
    * Create a mutable metric with stats
    * @param name  of the metric
    * @param desc  metric description

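A hedged usage sketch of the new newQuantiles() factory; the registry variable,
the metric names, and the MutableQuantiles.add(long) call are illustrative
assumptions rather than part of this patch:

    MutableQuantiles rpcLatency = registry.newQuantiles(
        "rpcLatency", "RPC processing latency", "Ops", "Latency", 60); // 60-second rollover interval
    rpcLatency.add(elapsedMicros);                                     // feed one sample to the estimator
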
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java Fri Oct 19 02:25:55 2012
@@ -35,7 +35,16 @@ import org.apache.hadoop.metrics2.annota
 import org.apache.hadoop.metrics2.annotation.Metrics;
 
 /**
- * Helper class to build metrics source object from annotations
+ * Helper class to build {@link MetricsSource} object from annotations.
+ * <p>
+ * For a given source object:
+ * <ul>
+ * <li>Sets the {@link Field}s annotated with {@link Metric} to
+ * {@link MutableMetric} and adds it to the {@link MetricsRegistry}.</li>
+ * <li>
+ * For {@link Method}s annotated with {@link Metric} creates
+ * {@link MutableMetric} and adds it to the {@link MetricsRegistry}.</li>
+ * </ul>
  */
 @InterfaceAudience.Private
 public class MetricsSourceBuilder {
@@ -95,8 +104,7 @@ public class MetricsSourceBuilder {
         r = (MetricsRegistry) field.get(source);
         hasRegistry = r != null;
         break;
-      }
-      catch (Exception e) {
+      } catch (Exception e) {
         LOG.warn("Error accessing field "+ field, e);
         continue;
       }
@@ -116,15 +124,20 @@ public class MetricsSourceBuilder {
     return r;
   }
 
+  /**
+   * Change the declared field {@code field} in the {@code source} object to a
+   * {@link MutableMetric}.
+   */
   private void add(Object source, Field field) {
     for (Annotation annotation : field.getAnnotations()) {
-      if (!(annotation instanceof Metric)) continue;
+      if (!(annotation instanceof Metric)) {
+        continue;
+      }
       try {
         // skip fields already set
         field.setAccessible(true);
         if (field.get(source) != null) continue;
-      }
-      catch (Exception e) {
+      } catch (Exception e) {
         LOG.warn("Error accessing field "+ field +" annotated with"+
                  annotation, e);
         continue;
@@ -133,10 +146,9 @@ public class MetricsSourceBuilder {
                                                   registry);
       if (mutable != null) {
         try {
-          field.set(source, mutable);
+          field.set(source, mutable); // Set the source field to MutableMetric
           hasAtMetric = true;
-        }
-        catch (Exception e) {
+        } catch (Exception e) {
           throw new MetricsException("Error setting field "+ field +
                                      " annotated with "+ annotation, e);
         }
@@ -144,9 +156,12 @@ public class MetricsSourceBuilder {
     }
   }
 
+  /** Add {@link MutableMetric} for a method annotated with {@link Metric} */
   private void add(Object source, Method method) {
     for (Annotation annotation : method.getAnnotations()) {
-      if (!(annotation instanceof Metric)) continue;
+      if (!(annotation instanceof Metric)) {
+        continue;
+      }
       factory.newForMethod(source, method, (Metric) annotation, registry);
       hasAtMetric = true;
     }

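The new javadoc above describes how fields and methods annotated with @Metric
are turned into MutableMetric instances and added to the MetricsRegistry. An
illustrative source class (the class, field, and method names are invented for
this example; the annotations and mutable types come from
org.apache.hadoop.metrics2.annotation and org.apache.hadoop.metrics2.lib):

    @Metrics(about="An example metrics source", context="example")
    class ExampleSource {
      @Metric("Number of operations") MutableCounterLong numOps; // left null; set by MetricsSourceBuilder
      @Metric("Time per operation") MutableRate opTime;

      @Metric("Current queue length")                            // wrapped via MethodMetric
      int getQueueLength() { return 0; }                         // placeholder body
    }
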
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeLong.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeLong.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeLong.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeLong.java Fri Oct 19 02:25:55 2012
@@ -80,6 +80,7 @@ public class MutableGaugeLong extends Mu
     setChanged();
   }
 
+  @Override
   public void snapshot(MetricsRecordBuilder builder, boolean all) {
     if (all || changed()) {
       builder.addGauge(info(), value);

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java Fri Oct 19 02:25:55 2012
@@ -130,6 +130,10 @@ public class MutableMetricsFactory {
     return Interns.info(name2, about.isEmpty() ? name2 : about);
   }
 
+  /**
+   * Remove the prefix "get", if any, from the method name. Return the
+   * capitalized method name.
+   */
   protected String getName(Method method) {
     String methodName = method.getName();
     if (methodName.startsWith("get")) {
@@ -140,12 +144,15 @@ public class MutableMetricsFactory {
 
   protected MetricsInfo getInfo(Metric annotation, String defaultName) {
     String[] value = annotation.value();
-     if (value.length == 2) {
+    if (value.length == 2) {
+      // Use name and description from the annotation
       return Interns.info(value[0], value[1]);
     }
     if (value.length == 1) {
+      // Use description from the annotation and method name as metric name
       return Interns.info(defaultName, value[0]);
     }
+    // Use method name as metric name and description
     return Interns.info(defaultName, defaultName);
   }
 }

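The comments added to getInfo() above spell out how the metric name and
description are derived from the @Metric annotation value; getName() strips a
leading "get" and capitalizes the remainder. Illustrative declarations covering
the three branches (method names are examples only):

    @Metric({"NumOps", "Number of operations"})  // two values: explicit name and description
    int getNumOps() { return 0; }

    @Metric("Number of operations")              // one value: description only, name derived from the method
    int getOpCount() { return 0; }               // metric name becomes "OpCount"

    @Metric                                      // no value: the derived name doubles as the description
    int getErrorCount() { return 0; }
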
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableStat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableStat.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableStat.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableStat.java Fri Oct 19 02:25:55 2012
@@ -109,6 +109,7 @@ public class MutableStat extends Mutable
     setChanged();
   }
 
+  @Override
   public synchronized void snapshot(MetricsRecordBuilder builder, boolean all) {
     if (all || changed()) {
       numSamples += intervalStat.numSamples();

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java Fri Oct 19 02:25:55 2012
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.metrics2.sink;
 
-import java.io.BufferedOutputStream;
 import java.io.File;
 import java.io.FileWriter;
 import java.io.PrintWriter;
@@ -48,8 +47,7 @@ public class FileSink implements Metrics
       writer = filename == null
           ? new PrintWriter(System.out)
           : new PrintWriter(new FileWriter(new File(filename), true));
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       throw new MetricsException("Error creating "+ filename, e);
     }
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java Fri Oct 19 02:25:55 2012
@@ -109,6 +109,7 @@ public abstract class AbstractGangliaSin
    * org.apache.hadoop.metrics2.MetricsPlugin#init(org.apache.commons.configuration
    * .SubsetConfiguration)
    */
+  @Override
   public void init(SubsetConfiguration conf) {
     LOG.debug("Initializing the GangliaSink for Ganglia metrics.");
 
@@ -155,6 +156,7 @@ public abstract class AbstractGangliaSin
    *
    * @see org.apache.hadoop.metrics2.MetricsSink#flush()
    */
+  @Override
   public void flush() {
     // nothing to do as we are not buffering data
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java Fri Oct 19 02:25:55 2012
@@ -43,6 +43,7 @@ public class GangliaSink31 extends Gangl
    * @param gSlope The slope for this metric
    * @throws IOException
    */
+  @Override
   protected void emitMetric(String groupName, String name, String type,
       String value, GangliaConf gConf, GangliaSlope gSlope) 
     throws IOException {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java Fri Oct 19 02:25:55 2012
@@ -23,7 +23,6 @@ import java.lang.management.MemoryMXBean
 import java.lang.management.MemoryUsage;
 import java.lang.management.ThreadInfo;
 import java.lang.management.ThreadMXBean;
-import static java.lang.Thread.State.*;
 import java.lang.management.GarbageCollectorMXBean;
 import java.util.Map;
 import java.util.List;

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java Fri Oct 19 02:25:55 2012
@@ -24,7 +24,6 @@ import org.apache.hadoop.conf.Configurab
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java Fri Oct 19 02:25:55 2012
@@ -30,7 +30,6 @@ import java.net.UnknownHostException;
 import java.util.Collections;
 import java.util.Enumeration;
 import java.util.LinkedHashSet;
-import java.util.Set;
 import java.util.Vector;
 
 import javax.naming.NamingException;

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java Fri Oct 19 02:25:55 2012
@@ -355,9 +355,8 @@ public class NetUtils {
   }
   
   /**
-   * Returns the InetSocketAddress that a client can use to connect to the
-   * given listening address.  This returns "hostname:port" of the server,
-   * or "127.0.0.1:port" when given a wildcard address of "0.0.0.0:port".
+   * Returns an InetSocketAddress that a client can use to connect to the
+   * given listening address.
    * 
    * @param addr of a listener
    * @return socket address that a client can use to connect to the server.

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java Fri Oct 19 02:25:55 2012
@@ -737,6 +737,7 @@ public class NetworkTopology {
   }
 
   /** convert a network tree to a string */
+  @Override
   public String toString() {
     // print the number of racks
     StringBuilder tree = new StringBuilder();

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java Fri Oct 19 02:25:55 2012
@@ -26,7 +26,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java Fri Oct 19 02:25:55 2012
@@ -33,7 +33,7 @@ import java.util.LinkedList;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
 
 /**
  * This supports input and output streams for socket channels. 
@@ -194,7 +194,7 @@ abstract class SocketIOWithTimeout {
       }
 
       long timeoutLeft = timeout;
-      long endTime = (timeout > 0) ? (System.currentTimeMillis() + timeout): 0;
+      long endTime = (timeout > 0) ? (Time.now() + timeout): 0;
       
       while (true) {
         // we might have to call finishConnect() more than once
@@ -209,7 +209,7 @@ abstract class SocketIOWithTimeout {
         
         if (ret == 0 ||
             (timeout > 0 &&  
-              (timeoutLeft = (endTime - System.currentTimeMillis())) <= 0)) {
+              (timeoutLeft = (endTime - Time.now())) <= 0)) {
           throw new SocketTimeoutException(
                     timeoutExceptionString(channel, timeout, 
                                            SelectionKey.OP_CONNECT));
@@ -329,7 +329,7 @@ abstract class SocketIOWithTimeout {
       
       try {
         while (true) {
-          long start = (timeout == 0) ? 0 : System.currentTimeMillis();
+          long start = (timeout == 0) ? 0 : Time.now();
 
           key = channel.register(info.selector, ops);
           ret = info.selector.select(timeout);
@@ -342,7 +342,7 @@ abstract class SocketIOWithTimeout {
            * unknown reasons. So select again if required.
            */
           if (timeout > 0) {
-            timeout -= System.currentTimeMillis() - start;
+            timeout -= Time.now() - start;
             if (timeout <= 0) {
               return 0;
             }
@@ -414,7 +414,7 @@ abstract class SocketIOWithTimeout {
         selInfo = queue.removeLast();
       }
       
-      trimIdleSelectors(System.currentTimeMillis());
+      trimIdleSelectors(Time.now());
       return selInfo;
     }
     
@@ -425,7 +425,7 @@ abstract class SocketIOWithTimeout {
      * @param info
      */
     private synchronized void release(SelectorInfo info) {
-      long now = System.currentTimeMillis();
+      long now = Time.now();
       trimIdleSelectors(now);
       info.lastActivityTime = now;
       info.queue.addLast(info);

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketInputStream.java Fri Oct 19 02:25:55 2012
@@ -50,6 +50,7 @@ class SocketInputStream extends InputStr
       this.channel = channel;
     }
     
+    @Override
     int performIO(ByteBuffer buf) throws IOException {
       return channel.read(buf);
     }
@@ -123,10 +124,12 @@ class SocketInputStream extends InputStr
     return ret;
   }
 
+  @Override
   public int read(byte[] b, int off, int len) throws IOException {
     return read(ByteBuffer.wrap(b, off, len));
   }
 
+  @Override
   public synchronized void close() throws IOException {
     /* close the channel since Socket.getInputStream().close()
      * closes the socket.
@@ -146,10 +149,12 @@ class SocketInputStream extends InputStr
   
   //ReadableByteChannel interface
     
+  @Override
   public boolean isOpen() {
     return reader.isOpen();
   }
     
+  @Override
   public int read(ByteBuffer dst) throws IOException {
     return reader.doIO(dst, SelectionKey.OP_READ);
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketOutputStream.java Fri Oct 19 02:25:55 2012
@@ -31,8 +31,8 @@ import java.nio.channels.WritableByteCha
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.metrics2.lib.MutableRate;
-import org.apache.hadoop.util.Progressable;
 
 /**
  * This implements an output stream that can have a timeout while writing.
@@ -58,6 +58,7 @@ public class SocketOutputStream extends 
       this.channel = channel;
     }
     
+    @Override
     int performIO(ByteBuffer buf) throws IOException {
       return channel.write(buf);
     }
@@ -98,6 +99,7 @@ public class SocketOutputStream extends 
     this(socket.getChannel(), timeout);
   }
   
+  @Override
   public void write(int b) throws IOException {
     /* If we need to, we can optimize this allocation.
      * probably no need to optimize or encourage single byte writes.
@@ -107,6 +109,7 @@ public class SocketOutputStream extends 
     write(buf, 0, 1);
   }
   
+  @Override
   public void write(byte[] b, int off, int len) throws IOException {
     ByteBuffer buf = ByteBuffer.wrap(b, off, len);
     while (buf.hasRemaining()) {
@@ -126,6 +129,7 @@ public class SocketOutputStream extends 
     }
   }
 
+  @Override
   public synchronized void close() throws IOException {
     /* close the channel since Socket.getOuputStream().close() 
      * closes the socket.
@@ -145,10 +149,12 @@ public class SocketOutputStream extends 
 
  //WritableByteChannel interface 
   
+  @Override
   public boolean isOpen() {
     return writer.isOpen();
   }
 
+  @Override
   public int write(ByteBuffer src) throws IOException {
     return writer.doIO(src, SelectionKey.OP_WRITE);
   }
@@ -179,9 +185,9 @@ public class SocketOutputStream extends 
    * @param fileCh FileChannel to transfer data from.
    * @param position position within the channel where the transfer begins
    * @param count number of bytes to transfer.
-   * @param waitForWritableTime updated by the nanoseconds spent waiting for 
-   * the socket to become writable
-   * @param transferTime updated by the nanoseconds spent transferring data
+   * @param waitForWritableTime nanoseconds spent waiting for the socket 
+   *        to become writable
+   * @param transferToTime nanoseconds spent transferring data
    * 
    * @throws EOFException 
    *         If end of input file is reached before requested number of 
@@ -195,8 +201,8 @@ public class SocketOutputStream extends 
    *         {@link FileChannel#transferTo(long, long, WritableByteChannel)}. 
    */
   public void transferToFully(FileChannel fileCh, long position, int count,
-      MutableRate waitForWritableTime,
-      MutableRate transferToTime) throws IOException {
+      LongWritable waitForWritableTime,
+      LongWritable transferToTime) throws IOException {
     long waitTime = 0;
     long transferTime = 0;
     while (count > 0) {
@@ -236,12 +242,12 @@ public class SocketOutputStream extends 
       waitTime += wait - start;
       transferTime += transfer - wait;
     }
-
+    
     if (waitForWritableTime != null) {
-      waitForWritableTime.add(waitTime);
+      waitForWritableTime.set(waitTime);
     }
     if (transferToTime != null) {
-      transferToTime.add(transferTime);
+      transferToTime.set(transferTime);
     }
   }
 

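The transferToFully() signature above now takes LongWritable holders whose
values are set, rather than accumulated, with the nanosecond timings. A hedged
caller sketch (the sockOut, fileChannel, position and count variables are
illustrative, not from this patch):

    LongWritable waitTime = new LongWritable();
    LongWritable transferTime = new LongWritable();
    sockOut.transferToFully(fileChannel, position, count, waitTime, transferTime);
    // waitTime.get() and transferTime.get() now hold the nanoseconds spent
    // waiting for writability and transferring data during this call.
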
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java Fri Oct 19 02:25:55 2012
@@ -59,14 +59,12 @@ public class SocksSocketFactory extends 
     this.proxy = proxy;
   }
 
-  /* @inheritDoc */
   @Override
   public Socket createSocket() throws IOException {
 
     return new Socket(proxy);
   }
 
-  /* @inheritDoc */
   @Override
   public Socket createSocket(InetAddress addr, int port) throws IOException {
 
@@ -75,7 +73,6 @@ public class SocksSocketFactory extends 
     return socket;
   }
 
-  /* @inheritDoc */
   @Override
   public Socket createSocket(InetAddress addr, int port,
       InetAddress localHostAddr, int localPort) throws IOException {
@@ -86,7 +83,6 @@ public class SocksSocketFactory extends 
     return socket;
   }
 
-  /* @inheritDoc */
   @Override
   public Socket createSocket(String host, int port) throws IOException,
       UnknownHostException {
@@ -96,7 +92,6 @@ public class SocksSocketFactory extends 
     return socket;
   }
 
-  /* @inheritDoc */
   @Override
   public Socket createSocket(String host, int port,
       InetAddress localHostAddr, int localPort) throws IOException,
@@ -108,13 +103,11 @@ public class SocksSocketFactory extends 
     return socket;
   }
 
-  /* @inheritDoc */
   @Override
   public int hashCode() {
     return proxy.hashCode();
   }
 
-  /* @inheritDoc */
   @Override
   public boolean equals(Object obj) {
     if (this == obj)
@@ -132,12 +125,12 @@ public class SocksSocketFactory extends 
     return true;
   }
 
-  /* @inheritDoc */
+  @Override
   public Configuration getConf() {
     return this.conf;
   }
 
-  /* @inheritDoc */
+  @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
     String proxyStr = conf.get("hadoop.socks.server");

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/StandardSocketFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/StandardSocketFactory.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/StandardSocketFactory.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/StandardSocketFactory.java Fri Oct 19 02:25:55 2012
@@ -42,7 +42,6 @@ public class StandardSocketFactory exten
   public StandardSocketFactory() {
   }
 
-  /* @inheritDoc */
   @Override
   public Socket createSocket() throws IOException {
     /*
@@ -63,7 +62,6 @@ public class StandardSocketFactory exten
     return SocketChannel.open().socket();
   }
 
-  /* @inheritDoc */
   @Override
   public Socket createSocket(InetAddress addr, int port) throws IOException {
 
@@ -72,7 +70,6 @@ public class StandardSocketFactory exten
     return socket;
   }
 
-  /* @inheritDoc */
   @Override
   public Socket createSocket(InetAddress addr, int port,
       InetAddress localHostAddr, int localPort) throws IOException {
@@ -83,7 +80,6 @@ public class StandardSocketFactory exten
     return socket;
   }
 
-  /* @inheritDoc */
   @Override
   public Socket createSocket(String host, int port) throws IOException,
       UnknownHostException {
@@ -93,7 +89,6 @@ public class StandardSocketFactory exten
     return socket;
   }
 
-  /* @inheritDoc */
   @Override
   public Socket createSocket(String host, int port,
       InetAddress localHostAddr, int localPort) throws IOException,
@@ -105,7 +100,6 @@ public class StandardSocketFactory exten
     return socket;
   }
 
-  /* @inheritDoc */
   @Override
   public boolean equals(Object obj) {
     if (this == obj)
@@ -115,7 +109,6 @@ public class StandardSocketFactory exten
     return obj.getClass().equals(this.getClass());
   }
 
-  /* @inheritDoc */
   @Override
   public int hashCode() {
     return this.getClass().hashCode();

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java Fri Oct 19 02:25:55 2012
@@ -125,6 +125,7 @@ public class TableMapping extends Cached
       }
     }
   
+    @Override
     public synchronized List<String> resolve(List<String> names) {
       if (!initialized) {
         initialized = true;

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordInput.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordInput.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordInput.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordInput.java Fri Oct 19 02:25:55 2012
@@ -41,9 +41,11 @@ public class BinaryRecordInput implement
     private BinaryIndex(int nelems) {
       this.nelems = nelems;
     }
+    @Override
     public boolean done() {
       return (nelems <= 0);
     }
+    @Override
     public void incr() {
       nelems--;
     }
@@ -56,6 +58,7 @@ public class BinaryRecordInput implement
   }
     
   private static ThreadLocal bIn = new ThreadLocal() {
+      @Override
       protected synchronized Object initialValue() {
         return new BinaryRecordInput();
       }
@@ -82,34 +85,42 @@ public class BinaryRecordInput implement
     this.in = din;
   }
     
+  @Override
   public byte readByte(final String tag) throws IOException {
     return in.readByte();
   }
     
+  @Override
   public boolean readBool(final String tag) throws IOException {
     return in.readBoolean();
   }
     
+  @Override
   public int readInt(final String tag) throws IOException {
     return Utils.readVInt(in);
   }
     
+  @Override
   public long readLong(final String tag) throws IOException {
     return Utils.readVLong(in);
   }
     
+  @Override
   public float readFloat(final String tag) throws IOException {
     return in.readFloat();
   }
     
+  @Override
   public double readDouble(final String tag) throws IOException {
     return in.readDouble();
   }
     
+  @Override
   public String readString(final String tag) throws IOException {
     return Utils.fromBinaryString(in);
   }
     
+  @Override
   public Buffer readBuffer(final String tag) throws IOException {
     final int len = Utils.readVInt(in);
     final byte[] barr = new byte[len];
@@ -117,26 +128,32 @@ public class BinaryRecordInput implement
     return new Buffer(barr);
   }
     
+  @Override
   public void startRecord(final String tag) throws IOException {
     // no-op
   }
     
+  @Override
   public void endRecord(final String tag) throws IOException {
     // no-op
   }
     
+  @Override
   public Index startVector(final String tag) throws IOException {
     return new BinaryIndex(readInt(tag));
   }
     
+  @Override
   public void endVector(final String tag) throws IOException {
     // no-op
   }
     
+  @Override
   public Index startMap(final String tag) throws IOException {
     return new BinaryIndex(readInt(tag));
   }
     
+  @Override
   public void endMap(final String tag) throws IOException {
     // no-op
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordOutput.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordOutput.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordOutput.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/BinaryRecordOutput.java Fri Oct 19 02:25:55 2012
@@ -45,6 +45,7 @@ public class BinaryRecordOutput implemen
   }
     
   private static ThreadLocal bOut = new ThreadLocal() {
+      @Override
       protected synchronized Object initialValue() {
         return new BinaryRecordOutput();
       }
@@ -72,34 +73,42 @@ public class BinaryRecordOutput implemen
   }
     
     
+  @Override
   public void writeByte(byte b, String tag) throws IOException {
     out.writeByte(b);
   }
     
+  @Override
   public void writeBool(boolean b, String tag) throws IOException {
     out.writeBoolean(b);
   }
     
+  @Override
   public void writeInt(int i, String tag) throws IOException {
     Utils.writeVInt(out, i);
   }
     
+  @Override
   public void writeLong(long l, String tag) throws IOException {
     Utils.writeVLong(out, l);
   }
     
+  @Override
   public void writeFloat(float f, String tag) throws IOException {
     out.writeFloat(f);
   }
     
+  @Override
   public void writeDouble(double d, String tag) throws IOException {
     out.writeDouble(d);
   }
     
+  @Override
   public void writeString(String s, String tag) throws IOException {
     Utils.toBinaryString(out, s);
   }
     
+  @Override
   public void writeBuffer(Buffer buf, String tag)
     throws IOException {
     byte[] barr = buf.get();
@@ -108,20 +117,26 @@ public class BinaryRecordOutput implemen
     out.write(barr, 0, len);
   }
     
+  @Override
   public void startRecord(Record r, String tag) throws IOException {}
     
+  @Override
   public void endRecord(Record r, String tag) throws IOException {}
     
+  @Override
   public void startVector(ArrayList v, String tag) throws IOException {
     writeInt(v.size(), tag);
   }
     
+  @Override
   public void endVector(ArrayList v, String tag) throws IOException {}
     
+  @Override
   public void startMap(TreeMap v, String tag) throws IOException {
     writeInt(v.size(), tag);
   }
     
+  @Override
   public void endMap(TreeMap v, String tag) throws IOException {}
     
 }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java Fri Oct 19 02:25:55 2012
@@ -187,6 +187,7 @@ public class Buffer implements Comparabl
   }
   
   // inherit javadoc
+  @Override
   public int hashCode() {
     int hash = 1;
     byte[] b = this.get();
@@ -202,6 +203,7 @@ public class Buffer implements Comparabl
    * @return Positive if this is bigger than other, 0 if they are equal, and
    *         negative if this is smaller than other.
    */
+  @Override
   public int compareTo(Object other) {
     Buffer right = ((Buffer) other);
     byte[] lb = this.get();
@@ -217,6 +219,7 @@ public class Buffer implements Comparabl
   }
   
   // inherit javadoc
+  @Override
   public boolean equals(Object other) {
     if (other instanceof Buffer && this != other) {
       return compareTo(other) == 0;
@@ -225,6 +228,7 @@ public class Buffer implements Comparabl
   }
   
  // inherit javadoc
+  @Override
   public String toString() {
     StringBuilder sb = new StringBuilder(2*count);
     for(int idx = 0; idx < count; idx++) {
@@ -245,6 +249,7 @@ public class Buffer implements Comparabl
   }
   
   // inherit javadoc
+  @Override
   public Object clone() throws CloneNotSupportedException {
     Buffer result = (Buffer) super.clone();
     result.copy(this.get(), 0, this.getCount());

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java Fri Oct 19 02:25:55 2012
@@ -38,6 +38,7 @@ public class CsvRecordInput implements R
   private PushbackReader stream;
     
   private class CsvIndex implements Index {
+    @Override
     public boolean done() {
       char c = '\0';
       try {
@@ -47,6 +48,7 @@ public class CsvRecordInput implements R
       }
       return (c == '}') ? true : false;
     }
+    @Override
     public void incr() {}
   }
     
@@ -85,19 +87,23 @@ public class CsvRecordInput implements R
     }
   }
     
+  @Override
   public byte readByte(String tag) throws IOException {
     return (byte) readLong(tag);
   }
     
+  @Override
   public boolean readBool(String tag) throws IOException {
     String sval = readField(tag);
     return "T".equals(sval) ? true : false;
   }
     
+  @Override
   public int readInt(String tag) throws IOException {
     return (int) readLong(tag);
   }
     
+  @Override
   public long readLong(String tag) throws IOException {
     String sval = readField(tag);
     try {
@@ -108,10 +114,12 @@ public class CsvRecordInput implements R
     }
   }
     
+  @Override
   public float readFloat(String tag) throws IOException {
     return (float) readDouble(tag);
   }
     
+  @Override
   public double readDouble(String tag) throws IOException {
     String sval = readField(tag);
     try {
@@ -122,18 +130,21 @@ public class CsvRecordInput implements R
     }
   }
     
+  @Override
   public String readString(String tag) throws IOException {
     String sval = readField(tag);
     return Utils.fromCSVString(sval);
   }
     
+  @Override
   public Buffer readBuffer(String tag) throws IOException {
     String sval = readField(tag);
     return Utils.fromCSVBuffer(sval);
   }
     
+  @Override
   public void startRecord(String tag) throws IOException {
-    if (tag != null && !"".equals(tag)) {
+    if (tag != null && !tag.isEmpty()) {
       char c1 = (char) stream.read();
       char c2 = (char) stream.read();
       if (c1 != 's' || c2 != '{') {
@@ -142,9 +153,10 @@ public class CsvRecordInput implements R
     }
   }
     
+  @Override
   public void endRecord(String tag) throws IOException {
     char c = (char) stream.read();
-    if (tag == null || "".equals(tag)) {
+    if (tag == null || tag.isEmpty()) {
       if (c != '\n' && c != '\r') {
         throw new IOException("Error deserializing record.");
       } else {
@@ -163,6 +175,7 @@ public class CsvRecordInput implements R
     return;
   }
     
+  @Override
   public Index startVector(String tag) throws IOException {
     char c1 = (char) stream.read();
     char c2 = (char) stream.read();
@@ -172,6 +185,7 @@ public class CsvRecordInput implements R
     return new CsvIndex();
   }
     
+  @Override
   public void endVector(String tag) throws IOException {
     char c = (char) stream.read();
     if (c != '}') {
@@ -184,6 +198,7 @@ public class CsvRecordInput implements R
     return;
   }
     
+  @Override
   public Index startMap(String tag) throws IOException {
     char c1 = (char) stream.read();
     char c2 = (char) stream.read();
@@ -193,6 +208,7 @@ public class CsvRecordInput implements R
     return new CsvIndex();
   }
     
+  @Override
   public void endMap(String tag) throws IOException {
     char c = (char) stream.read();
     if (c != '}') {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java Fri Oct 19 02:25:55 2012
@@ -61,10 +61,12 @@ public class CsvRecordOutput implements 
     }
   }
     
+  @Override
   public void writeByte(byte b, String tag) throws IOException {
     writeLong((long)b, tag);
   }
     
+  @Override
   public void writeBool(boolean b, String tag) throws IOException {
     printCommaUnlessFirst();
     String val = b ? "T" : "F";
@@ -72,32 +74,38 @@ public class CsvRecordOutput implements 
     throwExceptionOnError(tag);
   }
     
+  @Override
   public void writeInt(int i, String tag) throws IOException {
     writeLong((long)i, tag);
   }
     
+  @Override
   public void writeLong(long l, String tag) throws IOException {
     printCommaUnlessFirst();
     stream.print(l);
     throwExceptionOnError(tag);
   }
     
+  @Override
   public void writeFloat(float f, String tag) throws IOException {
     writeDouble((double)f, tag);
   }
     
+  @Override
   public void writeDouble(double d, String tag) throws IOException {
     printCommaUnlessFirst();
     stream.print(d);
     throwExceptionOnError(tag);
   }
     
+  @Override
   public void writeString(String s, String tag) throws IOException {
     printCommaUnlessFirst();
     stream.print(Utils.toCSVString(s));
     throwExceptionOnError(tag);
   }
     
+  @Override
   public void writeBuffer(Buffer buf, String tag)
     throws IOException {
     printCommaUnlessFirst();
@@ -105,16 +113,18 @@ public class CsvRecordOutput implements 
     throwExceptionOnError(tag);
   }
     
+  @Override
   public void startRecord(Record r, String tag) throws IOException {
-    if (tag != null && !"".equals(tag)) {
+    if (tag != null && ! tag.isEmpty()) {
       printCommaUnlessFirst();
       stream.print("s{");
       isFirst = true;
     }
   }
     
+  @Override
   public void endRecord(Record r, String tag) throws IOException {
-    if (tag == null || "".equals(tag)) {
+    if (tag == null || tag.isEmpty()) {
       stream.print("\n");
       isFirst = true;
     } else {
@@ -123,23 +133,27 @@ public class CsvRecordOutput implements 
     }
   }
     
+  @Override
   public void startVector(ArrayList v, String tag) throws IOException {
     printCommaUnlessFirst();
     stream.print("v{");
     isFirst = true;
   }
     
+  @Override
   public void endVector(ArrayList v, String tag) throws IOException {
     stream.print("}");
     isFirst = false;
   }
     
+  @Override
   public void startMap(TreeMap v, String tag) throws IOException {
     printCommaUnlessFirst();
     stream.print("m{");
     isFirst = true;
   }
     
+  @Override
   public void endMap(TreeMap v, String tag) throws IOException {
     stream.print("}");
     isFirst = false;

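[Editor's note] Beyond the @Override additions, the only textual change in CsvRecordOutput is the swap of "".equals(tag) for tag.isEmpty() in startRecord/endRecord. The rewrite is safe only because the surrounding tag != null / tag == null tests short-circuit first; isEmpty() would throw a NullPointerException on its own, whereas "".equals(null) simply returned false. A small self-contained sketch of the equivalence (names here are illustrative, not Hadoop's):

// Names here are illustrative; this is not the Hadoop class.
public class TagCheckDemo {
  static boolean hasTag(String tag) {
    return tag != null && !tag.isEmpty();    // new style, guarded by the null check
  }

  static boolean hasTagOld(String tag) {
    return tag != null && !"".equals(tag);   // old style, same result
  }

  public static void main(String[] args) {
    for (String t : new String[] { null, "", "myRecord" }) {
      System.out.println(t + " -> " + hasTag(t) + " / " + hasTagOld(t));
    }
  }
}
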
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Record.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Record.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Record.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Record.java Fri Oct 19 02:25:55 2012
@@ -54,6 +54,7 @@ public abstract class Record implements 
     throws IOException;
   
   // inherit javadoc
+  @Override
   public abstract int compareTo (final Object peer) throws ClassCastException;
   
   /**
@@ -73,18 +74,21 @@ public abstract class Record implements 
   }
   
   // inherit javadoc
+  @Override
   public void write(final DataOutput out) throws java.io.IOException {
     BinaryRecordOutput bout = BinaryRecordOutput.get(out);
     this.serialize(bout);
   }
   
   // inherit javadoc
+  @Override
   public void readFields(final DataInput din) throws java.io.IOException {
     BinaryRecordInput rin = BinaryRecordInput.get(din);
     this.deserialize(rin);
   }
 
   // inherit javadoc
+  @Override
   public String toString() {
     try {
       ByteArrayOutputStream s = new ByteArrayOutputStream();

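[Editor's note] The Record.java hunk is representative of most of this commit: @Override is added to methods that implement or override inherited declarations (compareTo, write, readFields, toString). Since Java 6 the annotation is also legal on interface implementations, and its value is purely compile-time: a typo in a signature becomes an error instead of a silently unused method. A minimal sketch using a made-up interface rather than Hadoop's real Writable contract; none of the names below are Hadoop APIs:

import java.io.DataOutput;
import java.io.IOException;

// Hypothetical interface standing in for the inherited contract.
interface RecordSerializable {
  void write(DataOutput out) throws IOException;
}

class DemoRecord implements RecordSerializable {
  @Override
  public void write(DataOutput out) throws IOException {
    // real serialization would go here
  }

  // Annotating this method with @Override would be a compile error, because
  // writeRecord(DataOutput) overrides nothing -- exactly the kind of mistake
  // the annotations added throughout this commit are meant to surface.
  public void writeRecord(DataOutput out) throws IOException {
  }

  public static void main(String[] args) {
    System.out.println("DemoRecord compiles; @Override is checked at compile time");
  }
}
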
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordComparator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordComparator.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordComparator.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordComparator.java Fri Oct 19 02:25:55 2012
@@ -40,6 +40,7 @@ public abstract class RecordComparator e
   }
   
   // inherit JavaDoc
+  @Override
   public abstract int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2);
   
   /**

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordInput.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordInput.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordInput.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordInput.java Fri Oct 19 02:25:55 2012
@@ -63,10 +63,13 @@ public class XmlRecordInput implements R
       valList = vlist;
     }
         
+    @Override
     public void startDocument() throws SAXException {}
         
+    @Override
     public void endDocument() throws SAXException {}
         
+    @Override
     public void startElement(String ns,
                              String sname,
                              String qname,
@@ -88,6 +91,7 @@ public class XmlRecordInput implements R
       }
     }
         
+    @Override
     public void endElement(String ns,
                            String sname,
                            String qname) throws SAXException {
@@ -98,6 +102,7 @@ public class XmlRecordInput implements R
       }
     }
         
+    @Override
     public void characters(char buf[], int offset, int len)
       throws SAXException {
       if (charsValid) {
@@ -109,6 +114,7 @@ public class XmlRecordInput implements R
   }
     
   private class XmlIndex implements Index {
+    @Override
     public boolean done() {
       Value v = valList.get(vIdx);
       if ("/array".equals(v.getType())) {
@@ -119,6 +125,7 @@ public class XmlRecordInput implements R
         return false;
       }
     }
+    @Override
     public void incr() {}
   }
     
@@ -152,6 +159,7 @@ public class XmlRecordInput implements R
     }
   }
     
+  @Override
   public byte readByte(String tag) throws IOException {
     Value v = next();
     if (!"ex:i1".equals(v.getType())) {
@@ -160,6 +168,7 @@ public class XmlRecordInput implements R
     return Byte.parseByte(v.getValue());
   }
     
+  @Override
   public boolean readBool(String tag) throws IOException {
     Value v = next();
     if (!"boolean".equals(v.getType())) {
@@ -168,6 +177,7 @@ public class XmlRecordInput implements R
     return "1".equals(v.getValue());
   }
     
+  @Override
   public int readInt(String tag) throws IOException {
     Value v = next();
     if (!"i4".equals(v.getType()) &&
@@ -177,6 +187,7 @@ public class XmlRecordInput implements R
     return Integer.parseInt(v.getValue());
   }
     
+  @Override
   public long readLong(String tag) throws IOException {
     Value v = next();
     if (!"ex:i8".equals(v.getType())) {
@@ -185,6 +196,7 @@ public class XmlRecordInput implements R
     return Long.parseLong(v.getValue());
   }
     
+  @Override
   public float readFloat(String tag) throws IOException {
     Value v = next();
     if (!"ex:float".equals(v.getType())) {
@@ -193,6 +205,7 @@ public class XmlRecordInput implements R
     return Float.parseFloat(v.getValue());
   }
     
+  @Override
   public double readDouble(String tag) throws IOException {
     Value v = next();
     if (!"double".equals(v.getType())) {
@@ -201,6 +214,7 @@ public class XmlRecordInput implements R
     return Double.parseDouble(v.getValue());
   }
     
+  @Override
   public String readString(String tag) throws IOException {
     Value v = next();
     if (!"string".equals(v.getType())) {
@@ -209,6 +223,7 @@ public class XmlRecordInput implements R
     return Utils.fromXMLString(v.getValue());
   }
     
+  @Override
   public Buffer readBuffer(String tag) throws IOException {
     Value v = next();
     if (!"string".equals(v.getType())) {
@@ -217,6 +232,7 @@ public class XmlRecordInput implements R
     return Utils.fromXMLBuffer(v.getValue());
   }
     
+  @Override
   public void startRecord(String tag) throws IOException {
     Value v = next();
     if (!"struct".equals(v.getType())) {
@@ -224,6 +240,7 @@ public class XmlRecordInput implements R
     }
   }
     
+  @Override
   public void endRecord(String tag) throws IOException {
     Value v = next();
     if (!"/struct".equals(v.getType())) {
@@ -231,6 +248,7 @@ public class XmlRecordInput implements R
     }
   }
     
+  @Override
   public Index startVector(String tag) throws IOException {
     Value v = next();
     if (!"array".equals(v.getType())) {
@@ -239,12 +257,15 @@ public class XmlRecordInput implements R
     return new XmlIndex();
   }
     
+  @Override
   public void endVector(String tag) throws IOException {}
     
+  @Override
   public Index startMap(String tag) throws IOException {
     return startVector(tag);
   }
     
+  @Override
   public void endMap(String tag) throws IOException { endVector(tag); }
 
 }

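[Editor's note] For orientation, the readX() methods annotated above all follow the same check-then-parse pattern: pull the next parsed (type, value) pair, verify the XML type tag (for example "ex:i8" for longs), and only then convert the text. A standalone sketch of that pattern; Value below is a stand-in for the parser's internal pairs, not the real XmlRecordInput inner class:

import java.io.IOException;

// Stand-in for the parser's (type, value) pairs.
final class Value {
  final String type;
  final String text;
  Value(String type, String text) { this.type = type; this.text = text; }
}

final class TypedReadDemo {
  // Same shape as readLong(String) above: verify the XML type tag, then parse.
  static long readLong(Value v) throws IOException {
    if (!"ex:i8".equals(v.type)) {
      throw new IOException("Error deserializing long: found " + v.type);
    }
    return Long.parseLong(v.text);
  }

  public static void main(String[] args) throws IOException {
    System.out.println(readLong(new Value("ex:i8", "42")));   // prints 42
  }
}
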
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordOutput.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordOutput.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordOutput.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/XmlRecordOutput.java Fri Oct 19 02:25:55 2012
@@ -149,6 +149,7 @@ public class XmlRecordOutput implements 
     }
   }
     
+  @Override
   public void writeByte(byte b, String tag) throws IOException {
     printBeginEnvelope(tag);
     stream.print("<ex:i1>");
@@ -157,6 +158,7 @@ public class XmlRecordOutput implements 
     printEndEnvelope(tag);
   }
     
+  @Override
   public void writeBool(boolean b, String tag) throws IOException {
     printBeginEnvelope(tag);
     stream.print("<boolean>");
@@ -165,6 +167,7 @@ public class XmlRecordOutput implements 
     printEndEnvelope(tag);
   }
     
+  @Override
   public void writeInt(int i, String tag) throws IOException {
     printBeginEnvelope(tag);
     stream.print("<i4>");
@@ -173,6 +176,7 @@ public class XmlRecordOutput implements 
     printEndEnvelope(tag);
   }
     
+  @Override
   public void writeLong(long l, String tag) throws IOException {
     printBeginEnvelope(tag);
     stream.print("<ex:i8>");
@@ -181,6 +185,7 @@ public class XmlRecordOutput implements 
     printEndEnvelope(tag);
   }
     
+  @Override
   public void writeFloat(float f, String tag) throws IOException {
     printBeginEnvelope(tag);
     stream.print("<ex:float>");
@@ -189,6 +194,7 @@ public class XmlRecordOutput implements 
     printEndEnvelope(tag);
   }
     
+  @Override
   public void writeDouble(double d, String tag) throws IOException {
     printBeginEnvelope(tag);
     stream.print("<double>");
@@ -197,6 +203,7 @@ public class XmlRecordOutput implements 
     printEndEnvelope(tag);
   }
     
+  @Override
   public void writeString(String s, String tag) throws IOException {
     printBeginEnvelope(tag);
     stream.print("<string>");
@@ -205,6 +212,7 @@ public class XmlRecordOutput implements 
     printEndEnvelope(tag);
   }
     
+  @Override
   public void writeBuffer(Buffer buf, String tag)
     throws IOException {
     printBeginEnvelope(tag);
@@ -214,12 +222,14 @@ public class XmlRecordOutput implements 
     printEndEnvelope(tag);
   }
     
+  @Override
   public void startRecord(Record r, String tag) throws IOException {
     insideRecord(tag);
     stream.print("<struct>\n");
     addIndent();
   }
     
+  @Override
   public void endRecord(Record r, String tag) throws IOException {
     closeIndent();
     putIndent();
@@ -227,12 +237,14 @@ public class XmlRecordOutput implements 
     outsideRecord(tag);
   }
     
+  @Override
   public void startVector(ArrayList v, String tag) throws IOException {
     insideVector(tag);
     stream.print("<array>\n");
     addIndent();
   }
     
+  @Override
   public void endVector(ArrayList v, String tag) throws IOException {
     closeIndent();
     putIndent();
@@ -240,12 +252,14 @@ public class XmlRecordOutput implements 
     outsideVector(tag);
   }
     
+  @Override
   public void startMap(TreeMap v, String tag) throws IOException {
     insideMap(tag);
     stream.print("<array>\n");
     addIndent();
   }
     
+  @Override
   public void endMap(TreeMap v, String tag) throws IOException {
     closeIndent();
     putIndent();

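[Editor's note] The XmlRecordOutput writers annotated above share one shape: bracket a typed element between printBeginEnvelope(tag) and printEndEnvelope(tag). The real envelope bodies are not part of this hunk, so the sketch below substitutes trivial placeholders; class and method contents are illustrative only:

import java.io.PrintStream;

// Placeholder envelopes only -- the real printBeginEnvelope/printEndEnvelope
// implementations are not shown in this diff.
class XmlWriteDemo {
  private final PrintStream stream = System.out;

  private void printBeginEnvelope(String tag) { stream.print("<!-- begin " + tag + " -->"); }
  private void printEndEnvelope(String tag)   { stream.print("<!-- end " + tag + " -->\n"); }

  // Same shape as writeInt(int, String) above: envelope, typed element, envelope.
  public void writeInt(int i, String tag) {
    printBeginEnvelope(tag);
    stream.print("<i4>");
    stream.print(i);
    stream.print("</i4>");
    printEndEnvelope(tag);
  }

  public static void main(String[] args) {
    new XmlWriteDemo().writeInt(7, "count");
  }
}
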
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CGenerator.java Fri Oct 19 02:25:55 2012
@@ -37,6 +37,7 @@ class CGenerator extends CodeGenerator {
    * and spits-out file-level elements (such as include statements etc.)
    * record-level code is generated by JRecord.
    */
+  @Override
   void genCode(String name, ArrayList<JFile> ilist,
                ArrayList<JRecord> rlist, String destDir, ArrayList<String> options)
     throws IOException {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CodeBuffer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CodeBuffer.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CodeBuffer.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CodeBuffer.java Fri Oct 19 02:25:55 2012
@@ -98,6 +98,7 @@ public class CodeBuffer {
     sb.append(ch);
   }
   
+  @Override
   public String toString() {
     return sb.toString();
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/Consts.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/Consts.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/Consts.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/Consts.java Fri Oct 19 02:25:55 2012
@@ -18,12 +18,8 @@
 
 package org.apache.hadoop.record.compiler;
 
-import java.io.IOException;
-import java.util.Iterator;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.record.RecordInput;
 
 /**
  * const definitions for Record I/O compiler

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/CppGenerator.java Fri Oct 19 02:25:55 2012
@@ -37,6 +37,7 @@ class CppGenerator extends CodeGenerator
    * and spits-out file-level elements (such as include statements etc.)
    * record-level code is generated by JRecord.
    */
+  @Override
   void genCode(String name, ArrayList<JFile> ilist,
                ArrayList<JRecord> rlist, String destDir, ArrayList<String> options)
     throws IOException {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBoolean.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBoolean.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBoolean.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBoolean.java Fri Oct 19 02:25:55 2012
@@ -36,20 +36,24 @@ public class JBoolean extends JType {
       super("boolean", "Bool", "Boolean", "TypeID.RIOType.BOOL");
     }
     
+    @Override
     void genCompareTo(CodeBuffer cb, String fname, String other) {
       cb.append(Consts.RIO_PREFIX + "ret = ("+fname+" == "+other+")? 0 : ("+
           fname+"?1:-1);\n");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "org.apache.hadoop.record.meta.TypeID.BoolTypeID";
     }
 
+    @Override
     void genHashCode(CodeBuffer cb, String fname) {
       cb.append(Consts.RIO_PREFIX + "ret = ("+fname+")?0:1;\n");
     }
     
     // In Binary format, boolean is written as byte. true = 1, false = 0
+    @Override
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
       cb.append("{\n");
       cb.append("if ("+l+"<1) {\n");
@@ -61,6 +65,7 @@ public class JBoolean extends JType {
     }
     
     // In Binary format, boolean is written as byte. true = 1, false = 0
+    @Override
     void genCompareBytes(CodeBuffer cb) {
       cb.append("{\n");
       cb.append("if (l1<1 || l2<1) {\n");
@@ -81,6 +86,7 @@ public class JBoolean extends JType {
       super("bool");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BOOL)";
     }
@@ -93,6 +99,7 @@ public class JBoolean extends JType {
     setCType(new CType());
   }
   
+  @Override
   String getSignature() {
     return "z";
   }

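[Editor's note] JBoolean is the first of the JType subclasses touched here; its generators append Java source text to a CodeBuffer. Rendered as ordinary code for a hypothetical boolean field named flag, the expressions emitted by genCompareTo and genHashCode above look like this (the real output prefixes ret with Consts.RIO_PREFIX; the field name is made up):

// Standalone rendering of the expressions the JBoolean generators emit.
public class BoolCodegenDemo {
  public static void main(String[] args) {
    boolean flag = true;
    boolean other = false;

    // genCompareTo: equal -> 0, otherwise true sorts after false
    int ret = (flag == other) ? 0 : (flag ? 1 : -1);
    System.out.println("compareTo -> " + ret);

    // genHashCode: the generated expression maps true -> 0 and false -> 1
    ret = (flag) ? 0 : 1;
    System.out.println("hashCode  -> " + ret);
  }
}
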
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBuffer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBuffer.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBuffer.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JBuffer.java Fri Oct 19 02:25:55 2012
@@ -39,22 +39,27 @@ public class JBuffer extends JCompType {
           "org.apache.hadoop.record.Buffer", "TypeID.RIOType.BUFFER");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "org.apache.hadoop.record.meta.TypeID.BufferTypeID";
     }
 
+    @Override
     void genCompareTo(CodeBuffer cb, String fname, String other) {
       cb.append(Consts.RIO_PREFIX + "ret = "+fname+".compareTo("+other+");\n");
     }
     
+    @Override
     void genEquals(CodeBuffer cb, String fname, String peer) {
       cb.append(Consts.RIO_PREFIX + "ret = "+fname+".equals("+peer+");\n");
     }
     
+    @Override
     void genHashCode(CodeBuffer cb, String fname) {
       cb.append(Consts.RIO_PREFIX + "ret = "+fname+".hashCode();\n");
     }
     
+    @Override
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
       cb.append("{\n");
       cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+
@@ -64,6 +69,7 @@ public class JBuffer extends JCompType {
       cb.append("}\n");
     }
     
+    @Override
     void genCompareBytes(CodeBuffer cb) {
       cb.append("{\n");
       cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
@@ -84,6 +90,7 @@ public class JBuffer extends JCompType {
       super(" ::std::string");
     }
     
+    @Override
     void genGetSet(CodeBuffer cb, String fname) {
       cb.append("virtual const "+getType()+"& get"+toCamelCase(fname)+"() const {\n");
       cb.append("return "+fname+";\n");
@@ -93,6 +100,7 @@ public class JBuffer extends JCompType {
       cb.append("}\n");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BUFFER)";
     }
@@ -105,6 +113,7 @@ public class JBuffer extends JCompType {
     setCType(new CCompType());
   }
   
+  @Override
   String getSignature() {
     return "B";
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JByte.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JByte.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JByte.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JByte.java Fri Oct 19 02:25:55 2012
@@ -37,10 +37,12 @@ public class JByte extends JType {
       super("byte", "Byte", "Byte", "TypeID.RIOType.BYTE");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "org.apache.hadoop.record.meta.TypeID.ByteTypeID";
     }
 
+    @Override
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
       cb.append("{\n");
       cb.append("if ("+l+"<1) {\n");
@@ -51,6 +53,7 @@ public class JByte extends JType {
       cb.append("}\n");
     }
     
+    @Override
     void genCompareBytes(CodeBuffer cb) {
       cb.append("{\n");
       cb.append("if (l1<1 || l2<1) {\n");
@@ -71,6 +74,7 @@ public class JByte extends JType {
       super("int8_t");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BYTE)";
     }
@@ -82,6 +86,7 @@ public class JByte extends JType {
     setCType(new CType());
   }
   
+  @Override
   String getSignature() {
     return "b";
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JCompType.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JCompType.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JCompType.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JCompType.java Fri Oct 19 02:25:55 2012
@@ -35,18 +35,22 @@ abstract class JCompType extends JType {
       super(type, suffix, wrapper, typeIDByteString);
     }
     
+    @Override
     void genCompareTo(CodeBuffer cb, String fname, String other) {
       cb.append(Consts.RIO_PREFIX + "ret = "+fname+".compareTo("+other+");\n");
     }
     
+    @Override
     void genEquals(CodeBuffer cb, String fname, String peer) {
       cb.append(Consts.RIO_PREFIX + "ret = "+fname+".equals("+peer+");\n");
     }
     
+    @Override
     void genHashCode(CodeBuffer cb, String fname) {
       cb.append(Consts.RIO_PREFIX + "ret = "+fname+".hashCode();\n");
     }
     
+    @Override
     void genClone(CodeBuffer cb, String fname) {
       cb.append(Consts.RIO_PREFIX + "other."+fname+" = ("+getType()+") this."+
           fname+".clone();\n");
@@ -59,6 +63,7 @@ abstract class JCompType extends JType {
       super(type);
     }
     
+    @Override
     void genGetSet(CodeBuffer cb, String fname) {
       cb.append("virtual const "+getType()+"& get"+toCamelCase(fname)+"() const {\n");
       cb.append("return "+fname+";\n");

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JDouble.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JDouble.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JDouble.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JDouble.java Fri Oct 19 02:25:55 2012
@@ -36,15 +36,18 @@ public class JDouble extends JType {
       super("double", "Double", "Double", "TypeID.RIOType.DOUBLE");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "org.apache.hadoop.record.meta.TypeID.DoubleTypeID";
     }
 
+    @Override
     void genHashCode(CodeBuffer cb, String fname) {
       String tmp = "Double.doubleToLongBits("+fname+")";
       cb.append(Consts.RIO_PREFIX + "ret = (int)("+tmp+"^("+tmp+">>>32));\n");
     }
     
+    @Override
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
       cb.append("{\n");
       cb.append("if ("+l+"<8) {\n");
@@ -55,6 +58,7 @@ public class JDouble extends JType {
       cb.append("}\n");
     }
     
+    @Override
     void genCompareBytes(CodeBuffer cb) {
       cb.append("{\n");
       cb.append("if (l1<8 || l2<8) {\n");
@@ -77,6 +81,7 @@ public class JDouble extends JType {
       super("double");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "new ::hadoop::TypeID(::hadoop::RIOTYPE_DOUBLE)";
     }
@@ -90,6 +95,7 @@ public class JDouble extends JType {
     setCType(new CType());
   }
   
+  @Override
   String getSignature() {
     return "d";
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JFloat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JFloat.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JFloat.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JFloat.java Fri Oct 19 02:25:55 2012
@@ -35,14 +35,17 @@ public class JFloat extends JType {
       super("float", "Float", "Float", "TypeID.RIOType.FLOAT");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "org.apache.hadoop.record.meta.TypeID.FloatTypeID";
     }
 
+    @Override
     void genHashCode(CodeBuffer cb, String fname) {
       cb.append(Consts.RIO_PREFIX + "ret = Float.floatToIntBits("+fname+");\n");
     }
     
+    @Override
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
       cb.append("{\n");
       cb.append("if ("+l+"<4) {\n");
@@ -53,6 +56,7 @@ public class JFloat extends JType {
       cb.append("}\n");
     }
     
+    @Override
     void genCompareBytes(CodeBuffer cb) {
       cb.append("{\n");
       cb.append("if (l1<4 || l2<4) {\n");
@@ -75,6 +79,7 @@ public class JFloat extends JType {
       super("float");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "new ::hadoop::TypeID(::hadoop::RIOTYPE_FLOAT)";
     }
@@ -87,6 +92,7 @@ public class JFloat extends JType {
     setCType(new CType());
   }
   
+  @Override
   String getSignature() {
     return "f";
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JInt.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JInt.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JInt.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JInt.java Fri Oct 19 02:25:55 2012
@@ -38,10 +38,12 @@ public class JInt extends JType {
       super("int", "Int", "Integer", "TypeID.RIOType.INT");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "org.apache.hadoop.record.meta.TypeID.IntTypeID";
     }
 
+    @Override
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
       cb.append("{\n");
       cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
@@ -50,6 +52,7 @@ public class JInt extends JType {
       cb.append("}\n");
     }
     
+    @Override
     void genCompareBytes(CodeBuffer cb) {
       cb.append("{\n");
       cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
@@ -70,6 +73,7 @@ public class JInt extends JType {
       super("int32_t");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "new ::hadoop::TypeID(::hadoop::RIOTYPE_INT)";
     }
@@ -82,6 +86,7 @@ public class JInt extends JType {
     setCType(new CType());
   }
   
+  @Override
   String getSignature() {
     return "i";
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JLong.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JLong.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JLong.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/compiler/JLong.java Fri Oct 19 02:25:55 2012
@@ -37,15 +37,18 @@ public class JLong extends JType {
       super("long", "Long", "Long", "TypeID.RIOType.LONG");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "org.apache.hadoop.record.meta.TypeID.LongTypeID";
     }
 
+    @Override
     void genHashCode(CodeBuffer cb, String fname) {
       cb.append(Consts.RIO_PREFIX + "ret = (int) ("+fname+"^("+
           fname+">>>32));\n");
     }
     
+    @Override
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
       cb.append("{\n");
       cb.append("long i = org.apache.hadoop.record.Utils.readVLong("+b+", "+s+");\n");
@@ -54,6 +57,7 @@ public class JLong extends JType {
       cb.append("}\n");
     }
     
+    @Override
     void genCompareBytes(CodeBuffer cb) {
       cb.append("{\n");
       cb.append("long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);\n");
@@ -74,6 +78,7 @@ public class JLong extends JType {
       super("int64_t");
     }
     
+    @Override
     String getTypeIDObjectString() {
       return "new ::hadoop::TypeID(::hadoop::RIOTYPE_LONG)";
     }
@@ -86,6 +91,7 @@ public class JLong extends JType {
     setCType(new CType());
   }
   
+  @Override
   String getSignature() {
     return "l";
   }


