hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From tomwh...@apache.org
Subject svn commit: r525505 - in /lucene/hadoop/trunk/src: examples/org/apache/hadoop/examples/ java/org/apache/hadoop/conf/ java/org/apache/hadoop/filecache/ java/org/apache/hadoop/ipc/ java/org/apache/hadoop/metrics/file/ java/org/apache/hadoop/metrics/gangl...
Date Wed, 04 Apr 2007 12:45:52 GMT
Author: tomwhite
Date: Wed Apr  4 05:45:50 2007
New Revision: 525505

URL: http://svn.apache.org/viewvc?view=rev&rev=525505
Log:
HADOOP-1190. Fix unchecked warnings in conf, examples, filecache, ipc, metrics, net, record,
and util packages.

Modified:
    lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java
    lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/filecache/DistributedCache.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/OutputRecord.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/Util.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/net/DNS.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordInput.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordOutput.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/MergeSort.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ProgramDriver.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Progress.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/RunJar.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java

Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java Wed Apr  4 05:45:50
2007
@@ -70,7 +70,7 @@
          jobConf.getInt("test.sort.maps_per_host", 10);
     int num_reduces = cluster.getTaskTrackers() * 
         jobConf.getInt("test.sort.reduces_per_host", cluster.getMaxTasks());
-    List otherArgs = new ArrayList();
+    List<String> otherArgs = new ArrayList<String>();
     for(int i=0; i < args.length; ++i) {
       try {
         if ("-m".equals(args[i])) {

Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java Wed Apr  4
05:45:50 2007
@@ -114,7 +114,7 @@
     conf.setCombinerClass(Reduce.class);
     conf.setReducerClass(Reduce.class);
     
-    List other_args = new ArrayList();
+    List<String> other_args = new ArrayList<String>();
     for(int i=0; i < args.length; ++i) {
       try {
         if ("-m".equals(args[i])) {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java Wed Apr  4 05:45:50
2007
@@ -74,8 +74,8 @@
     LogFactory.getLog("org.apache.hadoop.conf.Configuration");
 
   private boolean   quietmode = true;
-  private ArrayList defaultResources = new ArrayList();
-  private ArrayList finalResources = new ArrayList();
+  private ArrayList<Object> defaultResources = new ArrayList<Object>();
+  private ArrayList<Object> finalResources = new ArrayList<Object>();
 
   private Properties properties;
   private Properties overlay;
@@ -97,6 +97,7 @@
   }
 
   /** A new configuration with the same settings cloned from another. */
+  @SuppressWarnings("unchecked")
   public Configuration(Configuration other) {
     if (LOG.isDebugEnabled()) {
       LOG.debug(StringUtils.stringifyException
@@ -140,7 +141,9 @@
     addResource(finalResources, file);
   }
 
-  private synchronized void addResource(ArrayList resources, Object resource) {
+  private synchronized void addResource(ArrayList<Object> resources,
+      Object resource) {
+    
     resources.add(resource);                      // add to resources
     properties = null;                            // trigger reload
   }
@@ -311,14 +314,14 @@
    * @return the class object
    * @throws ClassNotFoundException if the class is not found
    */
-  public Class getClassByName(String name) throws ClassNotFoundException {
+  public Class<?> getClassByName(String name) throws ClassNotFoundException {
     return Class.forName(name, true, classLoader);
   }
   
   /** Returns the value of the <code>name</code> property as a Class.  If no
    * such property is specified, then <code>defaultValue</code> is returned.
    */
-  public Class getClass(String name, Class defaultValue) {
+  public Class<?> getClass(String name, Class<?> defaultValue) {
     String valueString = get(name);
     if (valueString == null)
       return defaultValue;
@@ -334,9 +337,11 @@
    * An error is thrown if the returned class does not implement the named
    * interface. 
    */
-  public Class getClass(String propertyName, Class defaultValue,Class xface) {
+  public Class<?> getClass(String propertyName, Class<?> defaultValue,
+      Class<?> xface) {
+    
     try {
-      Class theClass = getClass(propertyName, defaultValue);
+      Class<?> theClass = getClass(propertyName, defaultValue);
       if (theClass != null && !xface.isAssignableFrom(theClass))
         throw new RuntimeException(theClass+" not "+xface.getName());
       return theClass;
@@ -348,7 +353,9 @@
   /** Sets the value of the <code>name</code> property to the name of a class.
    * First checks that the class implements the named interface. 
    */
-  public void setClass(String propertyName, Class theClass, Class xface) {
+  public void setClass(String propertyName, Class<?> theClass,
+      Class<?> xface) {
+    
     if (!xface.isAssignableFrom(theClass))
       throw new RuntimeException(theClass+" not "+xface.getName());
     set(propertyName, theClass.getName());

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/filecache/DistributedCache.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/filecache/DistributedCache.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/filecache/DistributedCache.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/filecache/DistributedCache.java Wed Apr
 4 05:45:50 2007
@@ -37,7 +37,7 @@
  ******************************************************************************/
 public class DistributedCache {
   // cacheID to cacheStatus mapping
-  private static TreeMap cachedArchives = new TreeMap();
+  private static TreeMap<String, CacheStatus> cachedArchives = new TreeMap<String,
CacheStatus>();
   // buffer size for reading checksum files
   private static final int CRC_BUFFER_SIZE = 64 * 1024;
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java Wed Apr  4 05:45:50 2007
@@ -61,7 +61,8 @@
   
   public static final Log LOG =
     LogFactory.getLog("org.apache.hadoop.ipc.Client");
-  private Hashtable connections = new Hashtable();
+  private Hashtable<InetSocketAddress, Connection> connections =
+    new Hashtable<InetSocketAddress, Connection>();
 
   private Class valueClass;                       // class of call values
   private int timeout ;// timeout for calls
@@ -121,7 +122,8 @@
     private Socket socket = null;                 // connected socket
     private DataInputStream in;                   
     private DataOutputStream out;
-    private Hashtable calls = new Hashtable();    // currently active calls
+    // currently active calls
+    private Hashtable<Integer, Call> calls = new Hashtable<Integer, Call>();
     private Call readingCall;
     private Call writingCall;
     private int inUse = 0;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java Wed Apr  4 05:45:50 2007
@@ -134,11 +134,12 @@
   private int maxQueueSize;
 
   volatile private boolean running = true;         // true while server runs
-  private LinkedList callQueue = new LinkedList(); // queued calls
+  private LinkedList<Call> callQueue = new LinkedList<Call>(); // queued calls
 
-  private List connectionList = 
-       Collections.synchronizedList(new LinkedList()); //maintain a list
-                                                       //of client connectionss
+  private List<Connection> connectionList = 
+       Collections.synchronizedList(new LinkedList<Connection>());
+                                                       //maintain a list
+                                                       //of client connections
   private Listener listener = null;
   private int numConnections = 0;
   private Handler[] handlers = null;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/file/FileContext.java Wed Apr 
4 05:45:50 2007
@@ -25,7 +25,7 @@
 import java.io.FileWriter;
 import java.io.IOException;
 import java.io.PrintWriter;
-import java.util.Iterator;
+
 import org.apache.hadoop.metrics.ContextFactory;
 import org.apache.hadoop.metrics.MetricsException;
 import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
@@ -126,20 +126,14 @@
         writer.print(".");
         writer.print(recordName);
         String separator = ": ";
-        // for (String tagName : outRec.getTagNames()) {
-        Iterator tagIt = outRec.getTagNames().iterator();
-        while (tagIt.hasNext()) {
-            String tagName = (String) tagIt.next();
+        for (String tagName : outRec.getTagNames()) {
             writer.print(separator);
             separator = ", ";
             writer.print(tagName);
             writer.print("=");
             writer.print(outRec.getTag(tagName));
         }
-        // for (String metricName : outRec.getMetricNames()) {
-        Iterator metricIt = outRec.getMetricNames().iterator();
-        while (metricIt.hasNext()) {
-            String metricName = (String) metricIt.next();
+        for (String metricName : outRec.getMetricNames()) {
             writer.print(separator);
             separator = ", ";
             writer.print(metricName);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java Wed
Apr  4 05:45:50 2007
@@ -26,9 +26,9 @@
 import java.net.SocketAddress;
 import java.net.SocketException;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+
 import org.apache.hadoop.metrics.ContextFactory;
 import org.apache.hadoop.metrics.MetricsException;
 import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
@@ -55,8 +55,7 @@
     private static final int DEFAULT_PORT = 8649;
     private static final int BUFFER_SIZE = 1500;       // as per libgmond.c
     
-    //private static final Map<Class,String> typeTable = new HashMap<Class,String>(5);
-    private static final Map typeTable = new HashMap(5);
+    private static final Map<Class,String> typeTable = new HashMap<Class,String>(5);
     
     static {
         typeTable.put(String.class, "string");
@@ -69,16 +68,11 @@
     private byte[] buffer = new byte[BUFFER_SIZE];
     private int offset;
     
-    //private List<SocketAddress> metricsServers;
-    private List metricsServers;
-    //private Map<String,String> unitsTable;
-    private Map unitsTable;
-    //private Map<String,String> slopeTable;
-    private Map slopeTable;
-    //private Map<String,String> tmaxTable;
-    private Map tmaxTable;
-    //private Map<String,String> dmaxTable;
-    private Map dmaxTable;
+    private List<? extends SocketAddress> metricsServers;
+    private Map<String,String> unitsTable;
+    private Map<String,String> slopeTable;
+    private Map<String,String> tmaxTable;
+    private Map<String,String> dmaxTable;
     
     private DatagramSocket datagramSocket;
     
@@ -125,24 +119,18 @@
         
         // metric name formed from record name and tag values
         StringBuffer nameBuf = new StringBuffer(recordName);
-        // for (String tagName : outRec.getTagNames()) {
-        Iterator tagIt = outRec.getTagNames().iterator();
-        while (tagIt.hasNext()) {
-            String tagName = (String) tagIt.next();
-            nameBuf.append('.');
-            nameBuf.append(outRec.getTag(tagName));
+        for (String tagName : outRec.getTagNames()) {
+          nameBuf.append('.');
+          nameBuf.append(outRec.getTag(tagName));
         }
         nameBuf.append('.');
         String namePrefix = new String(nameBuf);
         
         // emit each metric in turn
-        // for (String metricName : outRec.getMetricNames()) {
-        Iterator metricIt = outRec.getMetricNames().iterator();
-        while (metricIt.hasNext()) {
-            String metricName = (String) metricIt.next();
-            Object metric = outRec.getMetric(metricName);
-            String type = (String) typeTable.get(metric.getClass());
-            emitMetric(namePrefix + metricName, type, metric.toString());
+        for (String metricName : outRec.getMetricNames()) {
+          Object metric = outRec.getMetric(metricName);
+          String type = (String) typeTable.get(metric.getClass());
+          emitMetric(namePrefix + metricName, type, metric.toString());
         }
         
     }
@@ -165,13 +153,10 @@
         xdr_int(tmax);
         xdr_int(dmax);
         
-        // for (SocketAddress socketAddress : metricsServers) {
-        Iterator socketIt = metricsServers.iterator();
-        while (socketIt.hasNext()) {
-            SocketAddress socketAddress = (SocketAddress) socketIt.next();
-            DatagramPacket packet = 
-                new DatagramPacket(buffer, offset, socketAddress);
-            datagramSocket.send(packet);
+        for (SocketAddress socketAddress : metricsServers) {
+          DatagramPacket packet = 
+              new DatagramPacket(buffer, offset, socketAddress);
+          datagramSocket.send(packet);
         }
     }
     

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
(original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
Wed Apr  4 05:45:50 2007
@@ -25,7 +25,6 @@
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
 import java.util.Timer;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java Wed
Apr  4 05:45:50 2007
@@ -33,8 +33,7 @@
 public class MetricsRecordImpl implements MetricsRecord {
     
     private TagMap tagTable = new TagMap();
-    //private Map<String,MetricValue> metricTable = new LinkedHashMap<String,MetricValue>();
-    private Map metricTable = new LinkedHashMap();
+    private Map<String,MetricValue> metricTable = new LinkedHashMap<String,MetricValue>();
     
     private String recordName;
     private AbstractMetricsContext context;
@@ -228,8 +227,7 @@
         return tagTable;
     }
 
-    //Map<String, MetricValue> getMetricTable() {
-    Map getMetricTable() {
+    Map<String, MetricValue> getMetricTable() {
         return metricTable;
     }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/OutputRecord.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/OutputRecord.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/OutputRecord.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/OutputRecord.java Wed Apr 
4 05:45:50 2007
@@ -42,8 +42,7 @@
     /**
      * Returns the set of tag names
      */
-    //public Set<String> getTagNames() {
-    public Set getTagNames() {
+    public Set<String> getTagNames() {
         return Collections.unmodifiableSet(tagMap.keySet());
     }
     
@@ -59,8 +58,7 @@
     /**
      * Returns the set of metric names.
      */
-    //public Set<String> getMetricNames() {
-    public Set getMetricNames() {
+    public Set<String> getMetricNames() {
         return Collections.unmodifiableSet(metricMap.keySet());
     }
     

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/Util.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/Util.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/Util.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/metrics/spi/Util.java Wed Apr  4 05:45:50
2007
@@ -22,6 +22,7 @@
 package org.apache.hadoop.metrics.spi;
 
 import java.net.InetSocketAddress;
+import java.net.SocketAddress;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -42,16 +43,14 @@
      * 
      * @return a list of InetSocketAddress objects.
      */
-    public static List parse(String specs, int defaultPort) {
-        List result = new ArrayList(1); // ArrayList<InetSocketAddress>
+    public static List<InetSocketAddress> parse(String specs, int defaultPort) {
+        List<InetSocketAddress> result = new ArrayList<InetSocketAddress>(1);
         if (specs == null) {
                 result.add(new InetSocketAddress("localhost", defaultPort));
         }
         else {
             String[] specStrings = specs.split("[ ,]+");
-            //for (String specString : specStrings) {
-            for (int i = 0; i < specStrings.length; i++) {
-                String specString = specStrings[i];
+            for (String specString : specStrings) {
                 int colon = specString.indexOf(':');
                 if (colon < 0 || colon == specString.length() - 1) {
                     result.add(new InetSocketAddress(specString, defaultPort));

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/net/DNS.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/net/DNS.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/net/DNS.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/net/DNS.java Wed Apr  4 05:45:50 2007
@@ -1,6 +1,5 @@
 package org.apache.hadoop.net;
 
-import java.io.IOException;
 import java.net.InetAddress;
 import java.net.NetworkInterface;
 import java.net.SocketException;
@@ -77,7 +76,7 @@
         return new String[] { InetAddress.getLocalHost()
                               .getHostAddress() };
       else {
-        Vector ips = new Vector();
+        Vector<String> ips = new Vector<String>();
         Enumeration e = netIF.getInetAddresses();
         while (e.hasMoreElements())
           ips.add(((InetAddress) e.nextElement()).getHostAddress());
@@ -119,7 +118,7 @@
   public static String[] getHosts(String strInterface, String nameserver)
     throws UnknownHostException {
     String[] ips = getIPs(strInterface);
-    Vector hosts = new Vector();
+    Vector<String> hosts = new Vector<String>();
     for (int ctr = 0; ctr < ips.length; ctr++)
       try {
         hosts.add(reverseDns(InetAddress.getByName(ips[ctr]),

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordInput.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordInput.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordInput.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordInput.java Wed Apr  4 05:45:50
2007
@@ -25,7 +25,6 @@
 import org.xml.sax.*;
 import org.xml.sax.helpers.DefaultHandler;
 import javax.xml.parsers.SAXParserFactory;
-import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.parsers.SAXParser;
 
 /**
@@ -52,9 +51,9 @@
     private class XMLParser extends DefaultHandler {
         private boolean charsValid = false;
         
-        private ArrayList valList;
+        private ArrayList<Value> valList;
         
-        private XMLParser(ArrayList vlist) {
+        private XMLParser(ArrayList<Value> vlist) {
             valList = vlist;
         }
         
@@ -96,7 +95,7 @@
         public void characters(char buf[], int offset, int len)
         throws SAXException {
             if (charsValid) {
-                Value v = (Value) valList.get(valList.size()-1);
+                Value v = valList.get(valList.size()-1);
                 v.addChars(buf, offset,len);
             }
         }
@@ -105,7 +104,7 @@
     
     private class XmlIndex implements Index {
         public boolean done() {
-            Value v = (Value) valList.get(vIdx);
+            Value v = valList.get(vIdx);
             if ("/array".equals(v.getType())) {
                 valList.set(vIdx, null);
                 vIdx++;
@@ -117,13 +116,13 @@
         public void incr() {}
     }
     
-    private ArrayList valList;
+    private ArrayList<Value> valList;
     private int vLen;
     private int vIdx;
     
     private Value next() throws IOException {
         if (vIdx < vLen) {
-            Value v = (Value) valList.get(vIdx);
+            Value v = valList.get(vIdx);
             valList.set(vIdx, null);
             vIdx++;
             return v;
@@ -135,7 +134,7 @@
     /** Creates a new instance of XmlRecordInput */
     public XmlRecordInput(InputStream in) {
       try{
-        valList = new ArrayList();
+        valList = new ArrayList<Value>();
         DefaultHandler handler = new XMLParser(valList);
         SAXParserFactory factory = SAXParserFactory.newInstance();
         SAXParser parser = factory.newSAXParser();

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordOutput.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordOutput.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordOutput.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordOutput.java Wed Apr  4
05:45:50 2007
@@ -36,7 +36,7 @@
     
     private int indent = 0;
     
-    private Stack compoundStack;
+    private Stack<String> compoundStack;
     
     private void putIndent() {
         StringBuffer sb = new StringBuffer("");
@@ -56,7 +56,7 @@
     
     private void printBeginEnvelope(String tag) {
         if (!compoundStack.empty()) {
-            String s = (String) compoundStack.peek();
+            String s = compoundStack.peek();
             if ("struct".equals(s)) {
                 putIndent();
                 stream.print("<member>\n");
@@ -77,7 +77,7 @@
     
     private void printEndEnvelope(String tag) {
         if (!compoundStack.empty()) {
-            String s = (String) compoundStack.peek();
+            String s = compoundStack.peek();
             if ("struct".equals(s)) {
                 stream.print("</value>\n");
                 closeIndent();
@@ -99,7 +99,7 @@
     }
     
     private void outsideVector(String tag) throws IOException {
-        String s = (String) compoundStack.pop();
+        String s = compoundStack.pop();
         if (!"vector".equals(s)) {
             throw new IOException("Error serializing vector.");
         }
@@ -112,7 +112,7 @@
     }
     
     private void outsideMap(String tag) throws IOException {
-        String s = (String) compoundStack.pop();
+        String s = compoundStack.pop();
         if (!"map".equals(s)) {
             throw new IOException("Error serializing map.");
         }
@@ -125,7 +125,7 @@
     }
     
     private void outsideRecord(String tag) throws IOException {
-        String s = (String) compoundStack.pop();
+        String s = compoundStack.pop();
         if (!"struct".equals(s)) {
             throw new IOException("Error serializing record.");
         }
@@ -136,7 +136,7 @@
     public XmlRecordOutput(OutputStream out) {
       try {
         stream = new PrintStream(out, true, "UTF-8");
-        compoundStack = new Stack();
+        compoundStack = new Stack<String>();
       } catch (UnsupportedEncodingException ex) {
         throw new RuntimeException(ex);
       }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java Wed Apr  4 05:45:50
2007
@@ -122,9 +122,9 @@
       if (!absPath.isAbsolute()) { return absPath; }
       String sRoot = root.toUri().getPath();
       String sPath = absPath.toUri().getPath();
-      Enumeration rootTokens = new StringTokenizer(sRoot, "/");
+      Enumeration<Object> rootTokens = new StringTokenizer(sRoot, "/");
       ArrayList rList = Collections.list(rootTokens);
-      Enumeration pathTokens = new StringTokenizer(sPath, "/");
+      Enumeration<Object> pathTokens = new StringTokenizer(sPath, "/");
       ArrayList pList = Collections.list(pathTokens);
       Iterator rIter = rList.iterator();
       Iterator pIter = pList.iterator();
@@ -312,13 +312,13 @@
       jobConf.setOutputPath(fakeOutDir);
       
       // create new sequence-files for holding paths
-      ArrayList pathList = new ArrayList();
-      ArrayList finalPathList = new ArrayList();
+      ArrayList<Path> pathList = new ArrayList<Path>();
+      ArrayList<String> finalPathList = new ArrayList<String>();
       pathList.add(new Path(srcPath));
       long totalBytes = 0;
       //int part = 0;
       while(!pathList.isEmpty()) {
-        Path top = (Path) pathList.remove(0);
+        Path top = pathList.remove(0);
         if (srcfs.isFile(top)) {
           totalBytes += srcfs.getLength(top);
           top = makeRelative(rootPath, top);
@@ -349,7 +349,7 @@
         SequenceFile.Writer writer = 
           SequenceFile.createWriter(fileSys,conf,file,Text.class,Text.class);
         for (int ipath = idx; ipath < finalPathList.size(); ipath += numMaps) {
-          String path = (String) finalPathList.get(ipath);
+          String path = finalPathList.get(ipath);
           writer.append(new Text(path), new Text(""));
         }
         writer.close();

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/MergeSort.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/MergeSort.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/MergeSort.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/MergeSort.java Wed Apr  4 05:45:50
2007
@@ -27,9 +27,10 @@
   IntWritable I = new IntWritable(0);
   IntWritable J = new IntWritable(0);
   
-  private Comparator comparator; //the comparator that the algo should use
+  //the comparator that the algo should use
+  private Comparator<IntWritable> comparator;
   
-  public MergeSort(Comparator comparator) {
+  public MergeSort(Comparator<IntWritable> comparator) {
     this.comparator = comparator;
   }
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ProgramDriver.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ProgramDriver.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ProgramDriver.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ProgramDriver.java Wed Apr  4 05:45:50
2007
@@ -20,10 +20,8 @@
 
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
-import java.util.TreeMap;
-import java.util.Iterator;
 import java.util.Map;
-import java.util.Map.Entry;
+import java.util.TreeMap;
 
 /** A driver that is used to run programs added to it
  */
@@ -36,10 +34,10 @@
      * @author Owen O'Malley
      * @date april 2006
      */
-     Map programs;
+     Map<String, ProgramDescription> programs;
      
      public ProgramDriver(){
-        programs = new TreeMap();
+        programs = new TreeMap<String, ProgramDescription>();
      }
      
     static private class ProgramDescription {
@@ -82,13 +80,12 @@
 	private String description;
     }
     
-    private static void printUsage(Map programs) {
+    private static void printUsage(Map<String, ProgramDescription> programs) {
 	System.out.println("Valid program names are:");
-	for(Iterator itr=programs.entrySet().iterator(); itr.hasNext();) {
-	    Map.Entry item = (Entry) itr.next();
-	    System.out.println("  " + (String) item.getKey() + ": " +
-			       ((ProgramDescription) item.getValue()).getDescription());
-	}   
+        for(Map.Entry<String, ProgramDescription> item : programs.entrySet()) {
+            System.out.println("  " + item.getKey() + ": " +
+                       item.getValue().getDescription());         
+        } 
     }
     
     /**
@@ -128,7 +125,7 @@
 	}
 	
 	// And that it is good.
-	ProgramDescription pgm = (ProgramDescription) programs.get(args[0]);
+	ProgramDescription pgm = programs.get(args[0]);
 	if (pgm == null) {
 	    System.out.println("Unknown program '" + args[0] + "' chosen.");
 	    printUsage(programs);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Progress.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Progress.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Progress.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Progress.java Wed Apr  4 05:45:50 2007
@@ -29,7 +29,7 @@
   private String status = "";
   private float progress;
   private int currentPhase;
-  private ArrayList phases = new ArrayList();
+  private ArrayList<Progress> phases = new ArrayList<Progress>();
   private Progress parent;
   private float progressPerPhase;
 
@@ -60,7 +60,7 @@
 
   /** Returns the current sub-node executing. */
   public Progress phase() {
-    return (Progress)phases.get(currentPhase);
+    return phases.get(currentPhase);
   }
 
   /** Completes this node, moving the parent node to its next child. */

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/RunJar.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/RunJar.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/RunJar.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/RunJar.java Wed Apr  4 05:45:50 2007
@@ -131,7 +131,7 @@
 
     unJar(file, workDir);
     
-    ArrayList classPath = new ArrayList();
+    ArrayList<URL> classPath = new ArrayList<URL>();
     classPath.add(new File(workDir+"/").toURL());
     classPath.add(file.toURL());
     classPath.add(new File(workDir, "classes/").toURL());
@@ -142,7 +142,7 @@
       }
     }
     ClassLoader loader =
-      new URLClassLoader((URL[])classPath.toArray(new URL[0]));
+      new URLClassLoader(classPath.toArray(new URL[0]));
 
     Thread.currentThread().setContextClassLoader(loader);
     Class mainClass = Class.forName(mainClassName, true, loader);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java?view=diff&rev=525505&r1=525504&r2=525505
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java Wed Apr  4 05:45:50 2007
@@ -261,7 +261,7 @@
     if (str == null)
       return null;
     StringTokenizer tokenizer = new StringTokenizer (str,",");
-    List values = new ArrayList();
+    List<String> values = new ArrayList<String>();
     while (tokenizer.hasMoreTokens()) {
       values.add(tokenizer.nextToken());
     }



Mime
View raw message