hadoop-common-commits mailing list archives

From: t...@apache.org
Subject: svn commit: r1150969 [2/2] - in /hadoop/common/branches/HDFS-1073/common: ./ bin/ conf/ src/ src/docs/ src/docs/cn/ src/java/ src/java/org/apache/hadoop/conf/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/fs/shell/ src/java/org/apache/hadoo...
Date: Tue, 26 Jul 2011 01:53:19 GMT
Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/util/DataChecksum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/util/DataChecksum.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/util/DataChecksum.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/util/DataChecksum.java Tue Jul 26 01:53:10 2011
@@ -21,10 +21,12 @@ package org.apache.hadoop.util;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.zip.Checksum;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.ChecksumException;
 
 /**
  * This class provides an interface and utilities for processing checksums for
@@ -40,9 +42,11 @@ public class DataChecksum implements Che
   // checksum types
   public static final int CHECKSUM_NULL    = 0;
   public static final int CHECKSUM_CRC32   = 1;
+  public static final int CHECKSUM_CRC32C  = 2;
   
   private static final int CHECKSUM_NULL_SIZE  = 0;
   private static final int CHECKSUM_CRC32_SIZE = 4;
+  private static final int CHECKSUM_CRC32C_SIZE = 4;
   
   
   public static DataChecksum newDataChecksum( int type, int bytesPerChecksum ) {
@@ -57,6 +61,9 @@ public class DataChecksum implements Che
     case CHECKSUM_CRC32 :
       return new DataChecksum( CHECKSUM_CRC32, new PureJavaCrc32(), 
                                CHECKSUM_CRC32_SIZE, bytesPerChecksum );
+    case CHECKSUM_CRC32C:
+      return new DataChecksum( CHECKSUM_CRC32C, new PureJavaCrc32C(),
+                               CHECKSUM_CRC32C_SIZE, bytesPerChecksum);
     default:
       return null;  
     }
@@ -126,7 +133,7 @@ public class DataChecksum implements Che
        return 0;
      }
 
-     if ( type == CHECKSUM_CRC32 ) {
+     if ( size == 4 ) {
        out.writeInt( (int) summer.getValue() );
      } else {
        throw new IOException( "Unknown Checksum " + type );
@@ -150,7 +157,7 @@ public class DataChecksum implements Che
         return 0;
       }
 
-      if ( type == CHECKSUM_CRC32 ) {
+      if ( size == 4 ) {
         int checksum = (int) summer.getValue();
         buf[offset+0] = (byte) ((checksum >>> 24) & 0xff);
         buf[offset+1] = (byte) ((checksum >>> 16) & 0xff);
@@ -172,7 +179,7 @@ public class DataChecksum implements Che
     * @return true if the checksum matches and false otherwise.
     */
    public boolean compare( byte buf[], int offset ) {
-     if ( size > 0 && type == CHECKSUM_CRC32 ) {
+     if ( size == 4 ) {
        int checksum = ( (buf[offset+0] & 0xff) << 24 ) | 
                       ( (buf[offset+1] & 0xff) << 16 ) |
                       ( (buf[offset+2] & 0xff) << 8 )  |
@@ -234,6 +241,157 @@ public class DataChecksum implements Che
   }
   
   /**
+   * Verify that the given checksums match the given data.
+   * 
+   * The 'mark' of the ByteBuffer parameters may be modified by this function,
+   * but the position is maintained.
+   *  
+   * @param data the DirectByteBuffer pointing to the data to verify.
+   * @param checksums the DirectByteBuffer pointing to a series of stored
+   *                  checksums
+   * @param fileName the name of the file being read, for error-reporting
+   * @param basePos the file position to which the start of 'data' corresponds
+   * @throws ChecksumException if the checksums do not match
+   */
+  public void verifyChunkedSums(ByteBuffer data, ByteBuffer checksums,
+      String fileName, long basePos)
+  throws ChecksumException {
+    if (size == 0) return;
+    
+    if (data.hasArray() && checksums.hasArray()) {
+      verifyChunkedSums(
+          data.array(), data.arrayOffset() + data.position(), data.remaining(),
+          checksums.array(), checksums.arrayOffset() + checksums.position(),
+          fileName, basePos);
+      return;
+    }
+    
+    int startDataPos = data.position();
+    data.mark();
+    checksums.mark();
+    try {
+      byte[] buf = new byte[bytesPerChecksum];
+      byte[] sum = new byte[size];
+      while (data.remaining() > 0) {
+        int n = Math.min(data.remaining(), bytesPerChecksum);
+        checksums.get(sum);
+        data.get(buf, 0, n);
+        summer.reset();
+        summer.update(buf, 0, n);
+        int calculated = (int)summer.getValue();
+        int stored = (sum[0] << 24 & 0xff000000) |
+          (sum[1] << 16 & 0xff0000) |
+          (sum[2] << 8 & 0xff00) |
+          sum[3] & 0xff;
+        if (calculated != stored) {
+          long errPos = basePos + data.position() - startDataPos - n;
+          throw new ChecksumException(
+              "Checksum error: "+ fileName + " at "+ errPos +
+              " exp: " + stored + " got: " + calculated, errPos);
+        }
+      }
+    } finally {
+      data.reset();
+      checksums.reset();
+    }
+  }
+  
+  /**
+   * Implementation of chunked verification specifically on byte arrays. This
+   * is to avoid the copy when dealing with ByteBuffers that have array backing.
+   */
+  private void verifyChunkedSums(
+      byte[] data, int dataOff, int dataLen,
+      byte[] checksums, int checksumsOff, String fileName,
+      long basePos) throws ChecksumException {
+    
+    int remaining = dataLen;
+    int dataPos = 0;
+    while (remaining > 0) {
+      int n = Math.min(remaining, bytesPerChecksum);
+      
+      summer.reset();
+      summer.update(data, dataOff + dataPos, n);
+      dataPos += n;
+      remaining -= n;
+      
+      int calculated = (int)summer.getValue();
+      int stored = (checksums[checksumsOff] << 24 & 0xff000000) |
+        (checksums[checksumsOff + 1] << 16 & 0xff0000) |
+        (checksums[checksumsOff + 2] << 8 & 0xff00) |
+        checksums[checksumsOff + 3] & 0xff;
+      checksumsOff += 4;
+      if (calculated != stored) {
+        long errPos = basePos + dataPos - n;
+        throw new ChecksumException(
+            "Checksum error: "+ fileName + " at "+ errPos +
+            " exp: " + stored + " got: " + calculated, errPos);
+      }
+    }
+  }
+
+  /**
+   * Calculate checksums for the given data.
+   * 
+   * The 'mark' of the ByteBuffer parameters may be modified by this function,
+   * but the position is maintained.
+   * 
+   * @param data the DirectByteBuffer pointing to the data to checksum.
+   * @param checksums the DirectByteBuffer into which checksums will be
+   *                  stored. Enough space must be available in this
+   *                  buffer to put the checksums.
+   */
+  public void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums) {
+    if (size == 0) return;
+    
+    if (data.hasArray() && checksums.hasArray()) {
+      calculateChunkedSums(data.array(), data.arrayOffset() + data.position(), data.remaining(),
+          checksums.array(), checksums.arrayOffset() + checksums.position());
+      return;
+    }
+    
+    data.mark();
+    checksums.mark();
+    try {
+      byte[] buf = new byte[bytesPerChecksum];
+      while (data.remaining() > 0) {
+        int n = Math.min(data.remaining(), bytesPerChecksum);
+        data.get(buf, 0, n);
+        summer.reset();
+        summer.update(buf, 0, n);
+        checksums.putInt((int)summer.getValue());
+      }
+    } finally {
+      data.reset();
+      checksums.reset();
+    }
+  }
+
+  /**
+   * Implementation of chunked calculation specifically on byte arrays. This
+   * is to avoid the copy when dealing with ByteBuffers that have array backing.
+   */
+  private void calculateChunkedSums(
+      byte[] data, int dataOffset, int dataLength,
+      byte[] sums, int sumsOffset) {
+
+    int remaining = dataLength;
+    while (remaining > 0) {
+      int n = Math.min(remaining, bytesPerChecksum);
+      summer.reset();
+      summer.update(data, dataOffset, n);
+      dataOffset += n;
+      remaining -= n;
+      long calculated = summer.getValue();
+      sums[sumsOffset++] = (byte) (calculated >> 24);
+      sums[sumsOffset++] = (byte) (calculated >> 16);
+      sums[sumsOffset++] = (byte) (calculated >> 8);
+      sums[sumsOffset++] = (byte) (calculated);
+    }
+  }
+
+
+  /**
   * This just provides a dummy implementation of the Checksum class.
   * It is used when no checksum is available or required for
   * data

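For illustration: the new verifyChunkedSums/calculateChunkedSums pair stores each per-chunk CRC as a plain big-endian 32-bit word. Below is a minimal, self-contained sketch of that encode/decode round trip; java.util.zip.CRC32 stands in here for Hadoop's PureJavaCrc32/PureJavaCrc32C.

import java.util.zip.CRC32;
import java.util.zip.Checksum;

public class ChunkedSumSketch {
  // Decode a big-endian 32-bit checksum, mirroring verifyChunkedSums above.
  static int decodeStored(byte[] sums, int off) {
    return (sums[off]     << 24 & 0xff000000) |
           (sums[off + 1] << 16 & 0x00ff0000) |
           (sums[off + 2] << 8  & 0x0000ff00) |
           (sums[off + 3]       & 0x000000ff);
  }

  public static void main(String[] args) {
    byte[] chunk = "one bytesPerChecksum-sized chunk".getBytes();
    Checksum summer = new CRC32();   // stand-in for PureJavaCrc32C
    summer.update(chunk, 0, chunk.length);
    int calculated = (int) summer.getValue();

    // Write it big-endian, as calculateChunkedSums does.
    byte[] sums = new byte[4];
    sums[0] = (byte) (calculated >> 24);
    sums[1] = (byte) (calculated >> 16);
    sums[2] = (byte) (calculated >> 8);
    sums[3] = (byte) calculated;

    // A mismatch here is what triggers the new ChecksumException.
    System.out.println(calculated == decodeStored(sums, 0));  // true
  }
}
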
Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/util/StringUtils.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/util/StringUtils.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/util/StringUtils.java Tue Jul 26 01:53:10 2011
@@ -29,6 +29,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Date;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
 import java.util.StringTokenizer;
@@ -741,16 +742,15 @@ public class StringUtils {
    * @param separator Separator to join with.
    * @param strings Strings to join.
    */
-  public static String join(CharSequence separator, Iterable<String> strings) {
-    StringBuilder sb = new StringBuilder();
-    boolean first = true;
-    for (String s : strings) {
-      if (first) {
-        first = false;
-      } else {
-        sb.append(separator);
-      }
-      sb.append(s);
+  public static String join(CharSequence separator, Iterable<?> strings) {
+    Iterator<?> i = strings.iterator();
+    if (!i.hasNext()) {
+      return "";
+    }
+    StringBuilder sb = new StringBuilder(i.next().toString());
+    while (i.hasNext()) {
+      sb.append(separator);
+      sb.append(i.next().toString());
     }
     return sb.toString();
   }

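The rewritten join seeds the StringBuilder with the first element, dropping the first-iteration flag, and the Iterable<?> signature now accepts any element type via toString(). A self-contained sketch of the same pattern:

import java.util.Arrays;
import java.util.Iterator;

public class JoinSketch {
  // Same shape as the rewritten StringUtils.join: seed the builder with the
  // first element, then prepend the separator for each remaining element.
  static String join(CharSequence separator, Iterable<?> items) {
    Iterator<?> i = items.iterator();
    if (!i.hasNext()) {
      return "";
    }
    StringBuilder sb = new StringBuilder(i.next().toString());
    while (i.hasNext()) {
      sb.append(separator).append(i.next().toString());
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    // Non-String elements now work, which the old Iterable<String> signature rejected.
    System.out.println(join(", ", Arrays.asList(1, 2, 3)));  // "1, 2, 3"
  }
}
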
Modified: hadoop/common/branches/HDFS-1073/common/src/saveVersion.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/saveVersion.sh?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/saveVersion.sh (original)
+++ hadoop/common/branches/HDFS-1073/common/src/saveVersion.sh Tue Jul 26 01:53:10 2011
@@ -33,7 +33,7 @@ if git rev-parse HEAD 2>/dev/null > /dev
   url="git://${hostname}${cwd}"
 elif [ -d .svn ]; then
   revision=`svn info | sed -n -e 's/Last Changed Rev: \(.*\)/\1/p'`
-  url=`svn info | sed -n -e 's/URL: \(.*\)/\1/p'`
+  url=`svn info | sed -n -e 's/^URL: \(.*\)/\1/p'`
   # Get canonical branch (branches/X, tags/X, or trunk)
   branch=`echo $url | sed -n -e 's,.*\(branches/.*\)$,\1,p' \
                              -e 's,.*\(tags/.*\)$,\1,p' \

Propchange: hadoop/common/branches/HDFS-1073/common/src/test/core/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Jul 26 01:53:10 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/common/src/test/core:1134995-1143556
+/hadoop/common/trunk/common/src/test/core:1134995-1150966
 /hadoop/core/branches/branch-0.19/core/src/test/core:713112
 /hadoop/core/trunk/src/test/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/conf/TestConfiguration.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/conf/TestConfiguration.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/conf/TestConfiguration.java Tue Jul 26 01:53:10 2011
@@ -367,6 +367,8 @@ public class TestConfiguration extends T
     appendProperty("test.hex1", "0x10");
     appendProperty("test.hex2", "0xF");
     appendProperty("test.hex3", "-0x10");
+    // Invalid: trailing non-numeric characters
+    appendProperty("test.hex4", "-0x10xyz");
     endConfig();
     Path fileResource = new Path(CONFIG);
     conf.addResource(fileResource);
@@ -376,7 +378,18 @@ public class TestConfiguration extends T
     assertEquals(15, conf.getLong("test.hex2", 0));
     assertEquals(-16, conf.getInt("test.hex3", 0));
     assertEquals(-16, conf.getLong("test.hex3", 0));
-
+    try {
+      conf.getLong("test.hex4", 0);
+      fail("Property had invalid long value, but was read successfully.");
+    } catch (NumberFormatException e) {
+      // pass
+    }
+    try {
+      conf.getInt("test.hex4", 0);
+      fail("Property had invalid int value, but was read successfully.");
+    } catch (NumberFormatException e) {
+      // pass
+    }
   }
 
   public void testIntegerValues() throws IOException{
@@ -386,6 +399,7 @@ public class TestConfiguration extends T
     appendProperty("test.int2", "020");
     appendProperty("test.int3", "-20");
     appendProperty("test.int4", " -20 ");
+    appendProperty("test.int5", " -20xyz ");
     endConfig();
     Path fileResource = new Path(CONFIG);
     conf.addResource(fileResource);
@@ -397,6 +411,12 @@ public class TestConfiguration extends T
     assertEquals(-20, conf.getLong("test.int3", 0));
     assertEquals(-20, conf.getInt("test.int4", 0));
     assertEquals(-20, conf.getLong("test.int4", 0));
+    try {
+      conf.getInt("test.int5", 0);
+      fail("Property had invalid int value, but was read successfully.");
+    } catch (NumberFormatException e) {
+      // pass
+    }
   }
   
   public void testBooleanValues() throws IOException {
@@ -424,6 +444,7 @@ public class TestConfiguration extends T
     appendProperty("test.float2", "003.1415");
     appendProperty("test.float3", "-3.1415");
     appendProperty("test.float4", " -3.1415 ");
+    appendProperty("test.float5", "xyz-3.1415xyz");
     endConfig();
     Path fileResource = new Path(CONFIG);
     conf.addResource(fileResource);
@@ -431,6 +452,12 @@ public class TestConfiguration extends T
     assertEquals(3.1415f, conf.getFloat("test.float2", 0.0f));
     assertEquals(-3.1415f, conf.getFloat("test.float3", 0.0f));
     assertEquals(-3.1415f, conf.getFloat("test.float4", 0.0f));
+    try {
+      conf.getFloat("test.float5", 0.0f);
+      fail("Property had invalid float value, but was read successfully.");
+    } catch (NumberFormatException e) {
+      // pass
+    }
   }
   
   public void testGetClass() throws IOException {

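The new assertions pin down that a malformed numeric property raises NumberFormatException rather than silently yielding the default. A short caller-side sketch (the property key here is hypothetical):

import org.apache.hadoop.conf.Configuration;

public class StrictNumericConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    conf.set("example.port", "80xyz");  // hypothetical key with trailing garbage

    try {
      int port = conf.getInt("example.port", 8080);
      System.out.println("port = " + port);
    } catch (NumberFormatException e) {
      // As the tests above assert, a malformed value is an error,
      // not a silent fall-back to the default.
      System.err.println("bad value for example.port: " + e.getMessage());
    }
  }
}
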
Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/fs/TestTrash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/fs/TestTrash.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/fs/TestTrash.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/fs/TestTrash.java Tue Jul 26 01:53:10 2011
@@ -20,9 +20,11 @@ package org.apache.hadoop.fs;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeys.*;
 
+import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.File;
 import java.io.IOException;
+import java.io.PrintStream;
 import java.net.URI;
 import java.util.HashSet;
 import java.util.Set;
@@ -413,6 +415,30 @@ public class TestTrash extends TestCase 
       assertTrue(count==num_runs);
     }
     
+    // Verify that -skipTrash is suggested when rm fails because it was not supplied
+    {
+      String[] args = new String[2];
+      args[0] = "-rmr";
+      args[1] = "/";  // the root always contains the trash directory
+      PrintStream stdout = System.out;
+      PrintStream stderr = System.err;
+      ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
+      PrintStream newOut = new PrintStream(byteStream);
+      System.setOut(newOut);
+      System.setErr(newOut);
+      try {
+        shell.run(args);
+      } catch (Exception e) {
+        System.err.println("Exception raised from Trash.run " +
+            e.getLocalizedMessage());
+      }
+      String output = byteStream.toString();
+      System.setOut(stdout);
+      System.setErr(stderr);
+      assertTrue("skipTrash wasn't suggested as remedy to failed rm command",
+        output.indexOf(("Consider using -skipTrash option")) != -1 );
+    }
+
   }
 
   public static void trashNonDefaultFS(Configuration conf) throws IOException {
@@ -454,6 +480,15 @@ public class TestTrash extends TestCase 
     trashNonDefaultFS(conf);
   }
   
+  public void testPluggableTrash() throws IOException {
+    Configuration conf = new Configuration();
+
+    // Test plugged TrashPolicy
+    conf.setClass("fs.trash.classname", TestTrashPolicy.class, TrashPolicy.class);
+    Trash trash = new Trash(conf);
+    assertTrue(trash.getTrashPolicy().getClass().equals(TestTrashPolicy.class));
+  }
+
   public void testTrashEmptier() throws Exception {
     Configuration conf = new Configuration();
     // Trash with 12 second deletes and 6 seconds checkpoints
@@ -612,4 +647,41 @@ public class TestTrash extends TestCase 
     // run performance piece as a separate test
     performanceTestDeleteSameFile();
   }
+
+  // Test TrashPolicy. Don't care about implementation.
+  public static class TestTrashPolicy extends TrashPolicy {
+    public TestTrashPolicy() { }
+
+    @Override
+    public void initialize(Configuration conf, FileSystem fs, Path home) {
+    }
+
+    @Override
+    public boolean isEnabled() {
+      return false;
+    }
+
+    @Override 
+    public boolean moveToTrash(Path path) throws IOException {
+      return false;
+    }
+
+    @Override
+    public void createCheckpoint() throws IOException {
+    }
+
+    @Override
+    public void deleteCheckpoint() throws IOException {
+    }
+
+    @Override
+    public Path getCurrentTrashDir() {
+      return null;
+    }
+
+    @Override
+    public Runnable getEmptier() throws IOException {
+      return null;
+    }
+  }
 }

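testPluggableTrash wires a custom policy through the fs.trash.classname key. A minimal sketch of the same wiring outside the test, using a no-op policy shaped like TestTrashPolicy above:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;
import org.apache.hadoop.fs.TrashPolicy;

public class PluggableTrashSketch {
  // Minimal no-op policy; mirrors the abstract methods TestTrashPolicy overrides.
  public static class NoopTrashPolicy extends TrashPolicy {
    @Override public void initialize(Configuration conf, FileSystem fs, Path home) { }
    @Override public boolean isEnabled() { return false; }
    @Override public boolean moveToTrash(Path path) throws IOException { return false; }
    @Override public void createCheckpoint() throws IOException { }
    @Override public void deleteCheckpoint() throws IOException { }
    @Override public Path getCurrentTrashDir() { return null; }
    @Override public Runnable getEmptier() throws IOException { return null; }
  }

  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    conf.setClass("fs.trash.classname", NoopTrashPolicy.class, TrashPolicy.class);

    // The Trash facade now delegates to the plugged-in policy.
    Trash trash = new Trash(conf);
    System.out.println(trash.getTrashPolicy().getClass().getSimpleName());
  }
}
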
Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHttpServer.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHttpServer.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHttpServer.java Tue Jul 26 01:53:10 2011
@@ -61,6 +61,7 @@ import org.junit.Test;
 import org.mockito.Mockito;
 
 public class TestHttpServer extends HttpServerFunctionalTest {
+  static final Log LOG = LogFactory.getLog(TestHttpServer.class);
   private static HttpServer server;
   private static URL baseUrl;
   private static final int MAX_THREADS = 10;
@@ -136,6 +137,7 @@ public class TestHttpServer extends Http
     server.addServlet("htmlcontent", "/htmlcontent", HtmlContentServlet.class);
     server.start();
     baseUrl = getServerURL(server);
+    LOG.info("HTTP server started: "+ baseUrl);
   }
   
   @AfterClass public static void cleanup() throws Exception {
@@ -233,9 +235,6 @@ public class TestHttpServer extends Http
    * 
    */
   public static class DummyServletFilter implements Filter {
-
-    private static final Log LOG = LogFactory.getLog(
-        DummyServletFilter.class);
     @Override
     public void destroy() { }
 

Propchange: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestSequenceFile.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Jul 26 01:53:10 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/common/src/test/core/org/apache/hadoop/io/TestSequenceFile.java:1134995-1143556
+/hadoop/common/trunk/common/src/test/core/org/apache/hadoop/io/TestSequenceFile.java:1134995-1150966
 /hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/io/TestSequenceFile.java:713112
 /hadoop/core/trunk/src/test/mapred/org/apache/hadoop/io/TestSequenceFile.java:776175-785643

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java Tue Jul 26 01:53:10 2011
@@ -24,7 +24,6 @@ import java.io.IOException;
 import java.util.Random;
 
 import junit.framework.Assert;
-import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -35,6 +34,10 @@ import org.apache.hadoop.io.file.tfile.T
 import org.apache.hadoop.io.file.tfile.TFile.Writer;
 import org.apache.hadoop.io.file.tfile.TFile.Reader.Location;
 import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner;
+import org.apache.hadoop.util.NativeCodeLoader;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 /**
  * 
@@ -42,7 +45,7 @@ import org.apache.hadoop.io.file.tfile.T
  * and LZO compression classes.
  * 
  */
-public class TestTFileByteArrays extends TestCase {
+public class TestTFileByteArrays {
   private static String ROOT =
       System.getProperty("test.build.data", "/tmp/tfile-test");
   private final static int BLOCK_SIZE = 512;
@@ -62,25 +65,29 @@ public class TestTFileByteArrays extends
   private String compression = Compression.Algorithm.GZ.getName();
   private String comparator = "memcmp";
   private String outputFile = "TFileTestByteArrays";
+
   /*
    * pre-sampled numbers of records in one block, based on the given the
-   * generated key and value strings
+   * generated key and value strings. This is slightly different based on
+   * whether or not the native libs are present.
    */
-  // private int records1stBlock = 4314;
-  // private int records2ndBlock = 4108;
-  private int records1stBlock = 4480;
-  private int records2ndBlock = 4263;
+  private int records1stBlock = NativeCodeLoader.isNativeCodeLoaded() ? 5674 : 4480;
+  private int records2ndBlock = NativeCodeLoader.isNativeCodeLoaded() ? 5574 : 4263;
 
   public void init(String compression, String comparator, String outputFile,
       int numRecords1stBlock, int numRecords2ndBlock) {
+    init(compression, comparator, outputFile);
+    this.records1stBlock = numRecords1stBlock;
+    this.records2ndBlock = numRecords2ndBlock;
+  }
+  
+  public void init(String compression, String comparator, String outputFile) {
     this.compression = compression;
     this.comparator = comparator;
     this.outputFile = outputFile;
-    this.records1stBlock = numRecords1stBlock;
-    this.records2ndBlock = numRecords2ndBlock;
   }
 
-  @Override
+  @Before
   public void setUp() throws IOException {
     conf = new Configuration();
     path = new Path(ROOT, outputFile);
@@ -89,12 +96,13 @@ public class TestTFileByteArrays extends
     writer = new Writer(out, BLOCK_SIZE, compression, comparator, conf);
   }
 
-  @Override
+  @After
   public void tearDown() throws IOException {
     if (!skip)
     fs.delete(path, true);
   }
 
+  @Test
   public void testNoDataEntry() throws IOException {
     if (skip) 
       return;
@@ -108,19 +116,21 @@ public class TestTFileByteArrays extends
     reader.close();
   }
 
+  @Test
   public void testOneDataEntry() throws IOException {
     if (skip)
       return;
     writeRecords(1);
     readRecords(1);
 
-    checkBlockIndex(1, 0, 0);
-    readValueBeforeKey(1, 0);
-    readKeyWithoutValue(1, 0);
-    readValueWithoutKey(1, 0);
-    readKeyManyTimes(1, 0);
+    checkBlockIndex(0, 0);
+    readValueBeforeKey(0);
+    readKeyWithoutValue(0);
+    readValueWithoutKey(0);
+    readKeyManyTimes(0);
   }
 
+  @Test
   public void testTwoDataEntries() throws IOException {
     if (skip)
       return;
@@ -133,6 +143,7 @@ public class TestTFileByteArrays extends
    * 
    * @throws IOException
    */
+  @Test
   public void testOneBlock() throws IOException {
     if (skip)
       return;
@@ -140,7 +151,7 @@ public class TestTFileByteArrays extends
     writeRecords(records1stBlock);
     readRecords(records1stBlock);
     // last key should be in the first block (block 0)
-    checkBlockIndex(records1stBlock, records1stBlock - 1, 0);
+    checkBlockIndex(records1stBlock - 1, 0);
   }
 
   /**
@@ -148,68 +159,70 @@ public class TestTFileByteArrays extends
    * 
    * @throws IOException
    */
+  @Test
   public void testOneBlockPlusOneEntry() throws IOException {
     if (skip)
       return;
     writeRecords(records1stBlock + 1);
     readRecords(records1stBlock + 1);
-    checkBlockIndex(records1stBlock + 1, records1stBlock - 1, 0);
-    checkBlockIndex(records1stBlock + 1, records1stBlock, 1);
+    checkBlockIndex(records1stBlock - 1, 0);
+    checkBlockIndex(records1stBlock, 1);
   }
 
+  @Test
   public void testTwoBlocks() throws IOException {
     if (skip)
       return;
     writeRecords(records1stBlock + 5);
     readRecords(records1stBlock + 5);
-    checkBlockIndex(records1stBlock + 5, records1stBlock + 4, 1);
+    checkBlockIndex(records1stBlock + 4, 1);
   }
 
+  @Test
   public void testThreeBlocks() throws IOException {
     if (skip) 
       return;
     writeRecords(2 * records1stBlock + 5);
     readRecords(2 * records1stBlock + 5);
 
-    checkBlockIndex(2 * records1stBlock + 5, 2 * records1stBlock + 4, 2);
+    checkBlockIndex(2 * records1stBlock + 4, 2);
     // 1st key in file
-    readValueBeforeKey(2 * records1stBlock + 5, 0);
-    readKeyWithoutValue(2 * records1stBlock + 5, 0);
-    readValueWithoutKey(2 * records1stBlock + 5, 0);
-    readKeyManyTimes(2 * records1stBlock + 5, 0);
+    readValueBeforeKey(0);
+    readKeyWithoutValue(0);
+    readValueWithoutKey(0);
+    readKeyManyTimes(0);
     // last key in file
-    readValueBeforeKey(2 * records1stBlock + 5, 2 * records1stBlock + 4);
-    readKeyWithoutValue(2 * records1stBlock + 5, 2 * records1stBlock + 4);
-    readValueWithoutKey(2 * records1stBlock + 5, 2 * records1stBlock + 4);
-    readKeyManyTimes(2 * records1stBlock + 5, 2 * records1stBlock + 4);
+    readValueBeforeKey(2 * records1stBlock + 4);
+    readKeyWithoutValue(2 * records1stBlock + 4);
+    readValueWithoutKey(2 * records1stBlock + 4);
+    readKeyManyTimes(2 * records1stBlock + 4);
 
     // 1st key in mid block, verify block indexes then read
-    checkBlockIndex(2 * records1stBlock + 5, records1stBlock - 1, 0);
-    checkBlockIndex(2 * records1stBlock + 5, records1stBlock, 1);
-    readValueBeforeKey(2 * records1stBlock + 5, records1stBlock);
-    readKeyWithoutValue(2 * records1stBlock + 5, records1stBlock);
-    readValueWithoutKey(2 * records1stBlock + 5, records1stBlock);
-    readKeyManyTimes(2 * records1stBlock + 5, records1stBlock);
+    checkBlockIndex(records1stBlock - 1, 0);
+    checkBlockIndex(records1stBlock, 1);
+    readValueBeforeKey(records1stBlock);
+    readKeyWithoutValue(records1stBlock);
+    readValueWithoutKey(records1stBlock);
+    readKeyManyTimes(records1stBlock);
 
     // last key in mid block, verify block indexes then read
-    checkBlockIndex(2 * records1stBlock + 5, records1stBlock + records2ndBlock
+    checkBlockIndex(records1stBlock + records2ndBlock
         - 1, 1);
-    checkBlockIndex(2 * records1stBlock + 5, records1stBlock + records2ndBlock,
-        2);
-    readValueBeforeKey(2 * records1stBlock + 5, records1stBlock
+    checkBlockIndex(records1stBlock + records2ndBlock, 2);
+    readValueBeforeKey(records1stBlock
         + records2ndBlock - 1);
-    readKeyWithoutValue(2 * records1stBlock + 5, records1stBlock
+    readKeyWithoutValue(records1stBlock
         + records2ndBlock - 1);
-    readValueWithoutKey(2 * records1stBlock + 5, records1stBlock
+    readValueWithoutKey(records1stBlock
         + records2ndBlock - 1);
-    readKeyManyTimes(2 * records1stBlock + 5, records1stBlock + records2ndBlock
+    readKeyManyTimes(records1stBlock + records2ndBlock
         - 1);
 
     // mid in mid block
-    readValueBeforeKey(2 * records1stBlock + 5, records1stBlock + 10);
-    readKeyWithoutValue(2 * records1stBlock + 5, records1stBlock + 10);
-    readValueWithoutKey(2 * records1stBlock + 5, records1stBlock + 10);
-    readKeyManyTimes(2 * records1stBlock + 5, records1stBlock + 10);
+    readValueBeforeKey(records1stBlock + 10);
+    readKeyWithoutValue(records1stBlock + 10);
+    readValueWithoutKey(records1stBlock + 10);
+    readKeyManyTimes(records1stBlock + 10);
   }
 
   Location locate(Scanner scanner, byte[] key) throws IOException {
@@ -219,27 +232,23 @@ public class TestTFileByteArrays extends
     return scanner.endLocation;
   }
   
+  @Test
   public void testLocate() throws IOException {
     if (skip)
       return;
     writeRecords(3 * records1stBlock);
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
     Scanner scanner = reader.createScanner();
-    Location loc2 =
-        locate(scanner, composeSortedKey(KEY, 3 * records1stBlock, 2)
-            .getBytes());
-    Location locLastIn1stBlock =
-        locate(scanner, composeSortedKey(KEY, 3 * records1stBlock,
-            records1stBlock - 1).getBytes());
-    Location locFirstIn2ndBlock =
-        locate(scanner, composeSortedKey(KEY, 3 * records1stBlock,
-            records1stBlock).getBytes());
+    locate(scanner, composeSortedKey(KEY, 2).getBytes());
+    locate(scanner, composeSortedKey(KEY, records1stBlock - 1).getBytes());
+    locate(scanner, composeSortedKey(KEY, records1stBlock).getBytes());
     Location locX = locate(scanner, "keyX".getBytes());
     Assert.assertEquals(scanner.endLocation, locX);
     scanner.close();
     reader.close();
   }
 
+  @Test
   public void testFailureWriterNotClosed() throws IOException {
     if (skip)
       return;
@@ -247,17 +256,16 @@ public class TestTFileByteArrays extends
     try {
       reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
       Assert.fail("Cannot read before closing the writer.");
-    }
-    catch (IOException e) {
+    } catch (IOException e) {
       // noop, expecting exceptions
-    }
-    finally {
+    } finally {
       if (reader != null) {
         reader.close();
       }
     }
   }
 
+  @Test
   public void testFailureWriteMetaBlocksWithSameName() throws IOException {
     if (skip)
       return;
@@ -271,16 +279,15 @@ public class TestTFileByteArrays extends
     outMeta.close();
     // add the same metablock
     try {
-      DataOutputStream outMeta2 =
-          writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
+      writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
       Assert.fail("Cannot create metablocks with the same name.");
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       // noop, expecting exceptions
     }
     closeOutput();
   }
 
+  @Test
   public void testFailureGetNonExistentMetaBlock() throws IOException {
     if (skip)
       return;
@@ -300,15 +307,14 @@ public class TestTFileByteArrays extends
     mb.close();
     try {
       DataInputStream mbBad = reader.getMetaBlock("testY");
-      Assert.assertNull(mbBad);
       Assert.fail("Error on handling non-existent metablocks.");
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       // noop, expecting exceptions
     }
     reader.close();
   }
 
+  @Test
   public void testFailureWriteRecordAfterMetaBlock() throws IOException {
     if (skip)
       return;
@@ -324,13 +330,13 @@ public class TestTFileByteArrays extends
     try {
       writer.append("keyY".getBytes(), "valueY".getBytes());
       Assert.fail("Cannot add key/value after start adding meta blocks.");
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       // noop, expecting exceptions
     }
     closeOutput();
   }
 
+  @Test
   public void testFailureReadValueManyTimes() throws IOException {
     if (skip)
       return;
@@ -346,8 +352,7 @@ public class TestTFileByteArrays extends
     try {
       scanner.entry().getValue(vbuf);
       Assert.fail("Cannot get the value mlutiple times.");
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       // noop, expecting exceptions
     }
 
@@ -355,6 +360,7 @@ public class TestTFileByteArrays extends
     reader.close();
   }
 
+  @Test
   public void testFailureBadCompressionCodec() throws IOException {
     if (skip)
       return;
@@ -363,13 +369,13 @@ public class TestTFileByteArrays extends
     try {
       writer = new Writer(out, BLOCK_SIZE, "BAD", comparator, conf);
       Assert.fail("Error on handling invalid compression codecs.");
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       // noop, expecting exceptions
       // e.printStackTrace();
     }
   }
 
+  @Test
   public void testFailureOpenEmptyFile() throws IOException {
     if (skip)
       return;
@@ -379,15 +385,14 @@ public class TestTFileByteArrays extends
     out = fs.create(path);
     out.close();
     try {
-      Reader reader =
-          new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
+      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
       Assert.fail("Error on handling empty files.");
-    }
-    catch (EOFException e) {
+    } catch (EOFException e) {
       // noop, expecting exceptions
     }
   }
 
+  @Test
   public void testFailureOpenRandomFile() throws IOException {
     if (skip)
       return;
@@ -404,15 +409,14 @@ public class TestTFileByteArrays extends
     }
     out.close();
     try {
-      Reader reader =
-          new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
+      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
       Assert.fail("Error on handling random files.");
-    }
-    catch (IOException e) {
+    } catch (IOException e) {
       // noop, expecting exceptions
     }
   }
 
+  @Test
   public void testFailureKeyLongerThan64K() throws IOException {
     if (skip)
       return;
@@ -421,13 +425,13 @@ public class TestTFileByteArrays extends
     rand.nextBytes(buf);
     try {
       writer.append(buf, "valueX".getBytes());
-    }
-    catch (IndexOutOfBoundsException e) {
+    } catch (IndexOutOfBoundsException e) {
       // noop, expecting exceptions
     }
     closeOutput();
   }
 
+  @Test
   public void testFailureOutOfOrderKeys() throws IOException {
     if (skip)
       return;
@@ -435,8 +439,7 @@ public class TestTFileByteArrays extends
       writer.append("keyM".getBytes(), "valueM".getBytes());
       writer.append("keyA".getBytes(), "valueA".getBytes());
       Assert.fail("Error on handling out of order keys.");
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       // noop, expecting exceptions
       // e.printStackTrace();
     }
@@ -444,19 +447,20 @@ public class TestTFileByteArrays extends
     closeOutput();
   }
 
+  @Test
   public void testFailureNegativeOffset() throws IOException {
     if (skip)
       return;
     try {
       writer.append("keyX".getBytes(), -1, 4, "valueX".getBytes(), 0, 6);
       Assert.fail("Error on handling negative offset.");
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       // noop, expecting exceptions
     }
     closeOutput();
   }
 
+  @Test
   public void testFailureNegativeOffset_2() throws IOException {
     if (skip)
       return;
@@ -467,30 +471,29 @@ public class TestTFileByteArrays extends
     try {
       scanner.lowerBound("keyX".getBytes(), -1, 4);
       Assert.fail("Error on handling negative offset.");
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       // noop, expecting exceptions
-    }
-    finally {
+    } finally {
       reader.close();
       scanner.close();
     }
     closeOutput();
   }
 
+  @Test
   public void testFailureNegativeLength() throws IOException {
     if (skip)
       return;
     try {
       writer.append("keyX".getBytes(), 0, -1, "valueX".getBytes(), 0, 6);
       Assert.fail("Error on handling negative length.");
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       // noop, expecting exceptions
     }
     closeOutput();
   }
 
+  @Test
   public void testFailureNegativeLength_2() throws IOException {
     if (skip)
       return;
@@ -501,17 +504,16 @@ public class TestTFileByteArrays extends
     try {
       scanner.lowerBound("keyX".getBytes(), 0, -1);
       Assert.fail("Error on handling negative length.");
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       // noop, expecting exceptions
-    }
-    finally {
+    } finally {
       scanner.close();
       reader.close();
     }
     closeOutput();
   }
 
+  @Test
   public void testFailureNegativeLength_3() throws IOException {
     if (skip)
       return;
@@ -542,6 +544,7 @@ public class TestTFileByteArrays extends
     }
   }
 
+  @Test
   public void testFailureCompressionNotWorking() throws IOException {
     if (skip)
       return;
@@ -552,6 +555,7 @@ public class TestTFileByteArrays extends
     closeOutput();
   }
 
+  @Test
   public void testFailureFileWriteNotAt0Position() throws IOException {
     if (skip)
       return;
@@ -562,8 +566,7 @@ public class TestTFileByteArrays extends
     try {
       writer = new Writer(out, BLOCK_SIZE, compression, comparator, conf);
       Assert.fail("Failed to catch file write not at position 0.");
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       // noop, expecting exceptions
     }
     closeOutput();
@@ -585,7 +588,7 @@ public class TestTFileByteArrays extends
     long rawDataSize = 0;
     int nx;
     for (nx = 0; nx < count; nx++) {
-      byte[] key = composeSortedKey(KEY, count, nx).getBytes();
+      byte[] key = composeSortedKey(KEY, nx).getBytes();
       byte[] value = (VALUE + nx).getBytes();
       writer.append(key, value);
       rawDataSize +=
@@ -599,28 +602,13 @@ public class TestTFileByteArrays extends
    * Insert some leading 0's in front of the value, to make the keys sorted.
    * 
    * @param prefix
-   * @param total
    * @param value
    * @return
    */
-  static String composeSortedKey(String prefix, int total, int value) {
+  static String composeSortedKey(String prefix, int value) {
     return String.format("%s%010d", prefix, value);
   }
 
-  /**
-   * Calculate how many digits are in the 10-based integer.
-   * 
-   * @param value
-   * @return
-   */
-  private static int numberDigits(int value) {
-    int digits = 0;
-    while ((value = value / 10) > 0) {
-      digits++;
-    }
-    return digits;
-  }
-
   private void readRecords(int count) throws IOException {
     readRecords(fs, path, count, conf);
   }
@@ -640,7 +628,7 @@ public class TestTFileByteArrays extends
         int klen = scanner.entry().getKeyLength();
         scanner.entry().getKey(kbuf);
         Assert.assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY,
-            count, nx));
+            nx));
 
         byte[] vbuf = new byte[BUF_SIZE];
         int vlen = scanner.entry().getValueLength();
@@ -650,30 +638,28 @@ public class TestTFileByteArrays extends
 
       Assert.assertTrue(scanner.atEnd());
       Assert.assertFalse(scanner.advance());
-    }
-    finally {
+    } finally {
       scanner.close();
       reader.close();
     }
   }
 
-  private void checkBlockIndex(int count, int recordIndex,
-      int blockIndexExpected) throws IOException {
+  private void checkBlockIndex(int recordIndex, int blockIndexExpected) throws IOException {
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
     Scanner scanner = reader.createScanner();
-    scanner.seekTo(composeSortedKey(KEY, count, recordIndex).getBytes());
+    scanner.seekTo(composeSortedKey(KEY, recordIndex).getBytes());
     Assert.assertEquals(blockIndexExpected, scanner.currentLocation
         .getBlockIndex());
     scanner.close();
     reader.close();
   }
 
-  private void readValueBeforeKey(int count, int recordIndex)
+  private void readValueBeforeKey(int recordIndex)
       throws IOException {
     Reader reader =
         new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
     Scanner scanner =
-        reader.createScannerByKey(composeSortedKey(KEY, count, recordIndex)
+        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
             .getBytes(), null);
 
     try {
@@ -686,19 +672,18 @@ public class TestTFileByteArrays extends
       int klen = scanner.entry().getKeyLength();
       scanner.entry().getKey(kbuf);
       Assert.assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY,
-          count, recordIndex));
-    }
-    finally {
+          recordIndex));
+    } finally {
       scanner.close();
       reader.close();
     }
   }
 
-  private void readKeyWithoutValue(int count, int recordIndex)
+  private void readKeyWithoutValue(int recordIndex)
       throws IOException {
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
     Scanner scanner =
-        reader.createScannerByKey(composeSortedKey(KEY, count, recordIndex)
+        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
             .getBytes(), null);
 
     try {
@@ -707,7 +692,7 @@ public class TestTFileByteArrays extends
       int klen1 = scanner.entry().getKeyLength();
       scanner.entry().getKey(kbuf1);
       Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
-          count, recordIndex));
+          recordIndex));
 
       if (scanner.advance() && !scanner.atEnd()) {
         // read the next key following the indexed
@@ -715,21 +700,20 @@ public class TestTFileByteArrays extends
         int klen2 = scanner.entry().getKeyLength();
         scanner.entry().getKey(kbuf2);
         Assert.assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY,
-            count, recordIndex + 1));
+            recordIndex + 1));
       }
-    }
-    finally {
+    } finally {
       scanner.close();
       reader.close();
     }
   }
 
-  private void readValueWithoutKey(int count, int recordIndex)
+  private void readValueWithoutKey(int recordIndex)
       throws IOException {
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
 
     Scanner scanner =
-        reader.createScannerByKey(composeSortedKey(KEY, count, recordIndex)
+        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
             .getBytes(), null);
 
     byte[] vbuf1 = new byte[BUF_SIZE];
@@ -749,11 +733,11 @@ public class TestTFileByteArrays extends
     reader.close();
   }
 
-  private void readKeyManyTimes(int count, int recordIndex) throws IOException {
+  private void readKeyManyTimes(int recordIndex) throws IOException {
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
 
     Scanner scanner =
-        reader.createScannerByKey(composeSortedKey(KEY, count, recordIndex)
+        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
             .getBytes(), null);
 
     // read the indexed key
@@ -761,17 +745,17 @@ public class TestTFileByteArrays extends
     int klen1 = scanner.entry().getKeyLength();
     scanner.entry().getKey(kbuf1);
     Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
-        count, recordIndex));
+        recordIndex));
 
     klen1 = scanner.entry().getKeyLength();
     scanner.entry().getKey(kbuf1);
     Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
-        count, recordIndex));
+        recordIndex));
 
     klen1 = scanner.entry().getKeyLength();
     scanner.entry().getKey(kbuf1);
     Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY,
-        count, recordIndex));
+        recordIndex));
 
     scanner.close();
     reader.close();

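The refactor drops the unused total parameter from composeSortedKey; a key is just the prefix plus the value zero-padded to ten digits, so lexicographic order matches numeric order. A tiny standalone illustration:

public class SortedKeySketch {
  // Same formatting as the simplified composeSortedKey(prefix, value).
  static String composeSortedKey(String prefix, int value) {
    return String.format("%s%010d", prefix, value);
  }

  public static void main(String[] args) {
    String a = composeSortedKey("key", 9);   // key0000000009
    String b = composeSortedKey("key", 10);  // key0000000010
    // Zero-padding makes String ordering agree with numeric ordering.
    System.out.println(a.compareTo(b) < 0);  // true
  }
}
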
Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java Tue Jul 26 01:53:10 2011
@@ -38,7 +38,7 @@ public class TestTFileJClassComparatorBy
   public void setUp() throws IOException {
     init(Compression.Algorithm.GZ.getName(),
         "jclass: org.apache.hadoop.io.file.tfile.MyComparator",
-        "TFileTestJClassComparator", 4480, 4263);
+        "TFileTestJClassComparator");
     super.setUp();
   }
 }

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileStreams.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileStreams.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileStreams.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/file/tfile/TestTFileStreams.java Tue Jul 26 01:53:10 2011
@@ -383,7 +383,7 @@ public class TestTFileStreams extends Te
       boolean knownValueLength, boolean close) throws IOException {
     long rawDataSize = 0;
     for (int nx = 0; nx < count; nx++) {
-      String key = TestTFileByteArrays.composeSortedKey("key", count, nx);
+      String key = TestTFileByteArrays.composeSortedKey("key", nx);
       DataOutputStream outKey =
           writer.prepareAppendKey(knownKeyLength ? key.length() : -1);
       outKey.write(key.getBytes());

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/retry/UnreliableImplementation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/retry/UnreliableImplementation.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/retry/UnreliableImplementation.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/retry/UnreliableImplementation.java Tue Jul 26 01:53:10 2011
@@ -15,16 +15,44 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.io.retry;
 
+import java.io.IOException;
+
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.ipc.StandbyException;
 
 public class UnreliableImplementation implements UnreliableInterface {
 
   private int failsOnceInvocationCount,
     failsOnceWithValueInvocationCount,
-    failsTenTimesInvocationCount;
+    failsTenTimesInvocationCount,
+    succeedsOnceThenFailsCount,
+    succeedsOnceThenFailsIdempotentCount,
+    succeedsTenTimesThenFailsCount;
+  
+  private String identifier;
+  private TypeOfExceptionToFailWith exceptionToFailWith;
+  
+  public static enum TypeOfExceptionToFailWith {
+    UNRELIABLE_EXCEPTION,
+    STANDBY_EXCEPTION,
+    IO_EXCEPTION
+  }
+  
+  public UnreliableImplementation() {
+    this(null);
+  }
+  
+  public UnreliableImplementation(String identifier) {
+    this(identifier, TypeOfExceptionToFailWith.UNRELIABLE_EXCEPTION);
+  }
+  
+  public UnreliableImplementation(String identifier,
+      TypeOfExceptionToFailWith exceptionToFailWith) {
+    this.identifier = identifier;
+    this.exceptionToFailWith = exceptionToFailWith;
+  }
   
   public void alwaysSucceeds() {
     // do nothing
@@ -57,4 +85,60 @@ public class UnreliableImplementation im
     }
   }
 
+  @Override
+  public String succeedsOnceThenFailsReturningString()
+      throws UnreliableException, IOException, StandbyException {
+    if (succeedsOnceThenFailsCount++ < 1) {
+      return identifier;
+    } else {
+      switch (exceptionToFailWith) {
+      case STANDBY_EXCEPTION:
+        throw new StandbyException(identifier);
+      case UNRELIABLE_EXCEPTION:
+        throw new UnreliableException(identifier);
+      case IO_EXCEPTION:
+        throw new IOException(identifier);
+      }
+      return null;
+    }
+  }
+
+  @Override
+  public String succeedsTenTimesThenFailsReturningString()
+      throws UnreliableException, IOException, StandbyException {
+    if (succeedsTenTimesThenFailsCount++ < 10) {
+      return identifier;
+    } else {
+      switch (exceptionToFailWith) {
+      case STANDBY_EXCEPTION:
+        throw new StandbyException(identifier);
+      case UNRELIABLE_EXCEPTION:
+        throw new UnreliableException(identifier);
+      case IO_EXCEPTION:
+        throw new IOException(identifier);
+      default:
+        throw new RuntimeException(identifier);
+      }
+    }
+  }
+
+  @Override
+  public String succeedsOnceThenFailsReturningStringIdempotent()
+      throws UnreliableException, StandbyException, IOException {
+    if (succeedsOnceThenFailsIdempotentCount++ < 1) {
+      return identifier;
+    } else {
+      switch (exceptionToFailWith) {
+      case STANDBY_EXCEPTION:
+        throw new StandbyException(identifier);
+      case UNRELIABLE_EXCEPTION:
+        throw new UnreliableException(identifier);
+      case IO_EXCEPTION:
+        throw new IOException(identifier);
+      default:
+        throw new RuntimeException(identifier);
+      }
+    }
+  }
+
 }

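The enriched mock now succeeds a fixed number of times and then fails with a configurable exception type, which lets retry and failover tests distinguish, for example, StandbyException from IOException. A sketch exercising it directly, based only on the constructors and methods shown above:

import org.apache.hadoop.io.retry.UnreliableImplementation;
import org.apache.hadoop.io.retry.UnreliableImplementation.TypeOfExceptionToFailWith;
import org.apache.hadoop.ipc.StandbyException;

public class UnreliableSketch {
  public static void main(String[] args) throws Exception {
    UnreliableImplementation impl = new UnreliableImplementation(
        "nn1", TypeOfExceptionToFailWith.STANDBY_EXCEPTION);

    // First call succeeds and returns the identifier...
    System.out.println(impl.succeedsOnceThenFailsReturningString());  // "nn1"

    // ...every later call fails with the configured exception type.
    try {
      impl.succeedsOnceThenFailsReturningString();
    } catch (StandbyException e) {
      System.out.println("got standby failure from: " + e.getMessage());
    }
  }
}
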
Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/retry/UnreliableInterface.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/retry/UnreliableInterface.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/retry/UnreliableInterface.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/retry/UnreliableInterface.java Tue Jul 26 01:53:10 2011
@@ -18,12 +18,28 @@
 
 package org.apache.hadoop.io.retry;
 
+import java.io.IOException;
+
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.ipc.StandbyException;
 
 public interface UnreliableInterface {
   
   public static class UnreliableException extends Exception {
-    // no body
+    private String identifier;
+    
+    public UnreliableException() {
+      // no body
+    }
+    
+    public UnreliableException(String identifier) {
+      this.identifier = identifier;
+    }
+    
+    @Override
+    public String getMessage() {
+      return identifier;
+    }
   }
   
   public static class FatalException extends UnreliableException {
@@ -39,4 +55,12 @@ public interface UnreliableInterface {
   boolean failsOnceThenSucceedsWithReturnValue() throws UnreliableException;
 
   void failsTenTimesThenSucceeds() throws UnreliableException;
+  
+  public String succeedsOnceThenFailsReturningString()
+      throws UnreliableException, StandbyException, IOException;
+  @Idempotent
+  public String succeedsOnceThenFailsReturningStringIdempotent()
+      throws UnreliableException, StandbyException, IOException;
+  public String succeedsTenTimesThenFailsReturningString()
+      throws UnreliableException, StandbyException, IOException;
 }

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestSaslRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestSaslRPC.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestSaslRPC.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestSaslRPC.java Tue Jul 26 01:53:10 2011
@@ -193,7 +193,7 @@ public class TestSaslRPC {
   public static class CustomSecurityInfo extends SecurityInfo {
 
     @Override
-    public KerberosInfo getKerberosInfo(Class<?> protocol) {
+    public KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
       return new KerberosInfo() {
         @Override
         public Class<? extends Annotation> annotationType() {
@@ -211,7 +211,7 @@ public class TestSaslRPC {
     }
 
     @Override
-    public TokenInfo getTokenInfo(Class<?> protocol) {
+    public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
       return new TokenInfo() {
         @Override
         public Class<? extends TokenSelector<? extends 

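The SecurityInfo callbacks now receive the Configuration, so a provider can answer per deployment. A minimal sketch of a provider under the new signatures; the package locations are assumptions based on the source tree of this era, and returning null means "no security metadata for this protocol":

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.KerberosInfo;
import org.apache.hadoop.security.SecurityInfo;
import org.apache.hadoop.security.token.TokenInfo;

// Sketch of the updated SecurityInfo contract: both lookups take the
// Configuration in addition to the protocol class.
public class NullSecurityInfo extends SecurityInfo {
  @Override
  public KerberosInfo getKerberosInfo(Class<?> protocol, Configuration conf) {
    return null;  // no Kerberos metadata for any protocol
  }

  @Override
  public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
    return null;  // no token metadata for any protocol
  }
}
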
Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/metrics2/util/TestMetricsCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/metrics2/util/TestMetricsCache.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/metrics2/util/TestMetricsCache.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/metrics2/util/TestMetricsCache.java Tue Jul 26 01:53:10 2011
@@ -35,6 +35,7 @@ import static org.apache.hadoop.metrics2
 public class TestMetricsCache {
   private static final Log LOG = LogFactory.getLog(TestMetricsCache.class);
 
+  @SuppressWarnings("deprecation")
   @Test public void testUpdate() {
     MetricsCache cache = new MetricsCache();
     MetricsRecord mr = makeRecord("r",
@@ -54,25 +55,26 @@ public class TestMetricsCache {
         Arrays.asList(makeMetric("m", 2), makeMetric("m2", 42)));
     cr = cache.update(mr2);
     assertEquals("contains 3 metric", 3, cr.metrics().size());
-    assertEquals("updated metric value", 2, cr.getMetric("m"));
-    assertEquals("old metric value", 1, cr.getMetric("m1"));
-    assertEquals("new metric value", 42, cr.getMetric("m2"));
+    checkMetricValue("updated metric value", cr, "m", 2);
+    checkMetricValue("old metric value", cr, "m1", 1);
+    checkMetricValue("new metric value", cr, "m2", 42);
 
     MetricsRecord mr3 = makeRecord("r",
         Arrays.asList(makeTag("t", "tv3")), // different tag value
         Arrays.asList(makeMetric("m3", 3)));
     cr = cache.update(mr3); // should get a new record
     assertEquals("contains 1 metric", 1, cr.metrics().size());
-    assertEquals("updated metric value", 3, cr.getMetric("m3"));
+    checkMetricValue("updated metric value", cr, "m3", 3);
     // tags cache should be empty so far
     assertEquals("no tags", 0, cr.tags().size());
     // until now
     cr = cache.update(mr3, true);
     assertEquals("Got 1 tag", 1, cr.tags().size());
     assertEquals("Tag value", "tv3", cr.getTag("t"));
-    assertEquals("Metric value", 3, cr.getMetric("m3"));
+    checkMetricValue("Metric value", cr, "m3", 3);
   }
 
+  @SuppressWarnings("deprecation")
   @Test public void testGet() {
     MetricsCache cache = new MetricsCache();
     assertNull("empty", cache.get("r", Arrays.asList(makeTag("t", "t"))));
@@ -85,7 +87,7 @@ public class TestMetricsCache {
 
     assertNotNull("Got record", cr);
     assertEquals("contains 1 metric", 1, cr.metrics().size());
-    assertEquals("new metric value", 1, cr.getMetric("m"));
+    checkMetricValue("new metric value", cr, "m", 1);
   }
 
   /**
@@ -109,7 +111,7 @@ public class TestMetricsCache {
       cr = cache.update(makeRecord("r",
           Arrays.asList(makeTag("t"+ i, ""+ i)),
           Arrays.asList(makeMetric("m", i))));
-      assertEquals("new metrics value", i, cr.getMetric("m"));
+      checkMetricValue("new metric value", cr, "m", i);
       if (i < MetricsCache.MAX_RECS_PER_NAME_DEFAULT) {
         assertNotNull("t0 is still there", cache.get("r", t0));
       }
@@ -117,6 +119,13 @@ public class TestMetricsCache {
     assertNull("t0 is gone", cache.get("r", t0));
   }
 
+  private void checkMetricValue(String description, MetricsCache.Record cr,
+      String key, Number val) {
+    assertEquals(description, val, cr.getMetric(key));
+    assertNotNull("metric not null", cr.getMetricInstance(key));
+    assertEquals(description, val, cr.getMetricInstance(key).value());
+  }
+
   private MetricsRecord makeRecord(String name, Collection<MetricsTag> tags,
                                    Collection<AbstractMetric> metrics) {
     MetricsRecord mr = mock(MetricsRecord.class);

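The new checkMetricValue helper above asserts that the deprecated Number-returning getMetric and the newer getMetricInstance accessor agree on the same key. A standalone sketch of those two read paths follows, using the same Mockito-backed record construction as the test; the record and metric names are illustrative.

    import static org.mockito.Mockito.*;

    import java.util.Arrays;
    import java.util.Collections;
    import org.apache.hadoop.metrics2.AbstractMetric;
    import org.apache.hadoop.metrics2.MetricsRecord;
    import org.apache.hadoop.metrics2.MetricsTag;
    import org.apache.hadoop.metrics2.util.MetricsCache;

    public class MetricsCacheReadPaths {
      public static void main(String[] args) {
        // Mock a record "r" carrying a single metric m=1, as the test does.
        AbstractMetric m = mock(AbstractMetric.class);
        when(m.name()).thenReturn("m");
        when(m.value()).thenReturn(1);
        MetricsRecord mr = mock(MetricsRecord.class);
        when(mr.name()).thenReturn("r");
        when(mr.tags()).thenReturn(Collections.<MetricsTag>emptyList());
        when(mr.metrics()).thenReturn(Arrays.<AbstractMetric>asList(m));

        MetricsCache cache = new MetricsCache();
        MetricsCache.Record rec = cache.update(mr);
        System.out.println(rec.getMetric("m"));                 // deprecated path
        System.out.println(rec.getMetricInstance("m").value()); // new path
      }
    }
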
Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/net/StaticMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/net/StaticMapping.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/net/StaticMapping.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/net/StaticMapping.java Tue Jul 26 01:53:10 2011
@@ -18,6 +18,7 @@
 package org.apache.hadoop.net;
 
 import java.util.*;
+import java.net.UnknownHostException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -59,4 +60,19 @@ public class StaticMapping extends Confi
       return m;
     }
   }
+  public List<String> resolveValidHosts(List<String> names)
+    throws UnknownHostException {
+    List<String> m = new ArrayList<String>();
+    synchronized (nameToRackMap) {
+      for (String name : names) {
+        String rackId;
+        if ((rackId = nameToRackMap.get(name)) != null) {
+          m.add(rackId);
+        } else {
+          throw new UnknownHostException(name);
+        }
+      }
+      return m;
+    }
+  }
 }

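resolveValidHosts, added above, is all-or-nothing: under the nameToRackMap lock it either maps every name to its rack or throws UnknownHostException naming the first unmapped host, rather than falling back to a default rack. A minimal usage sketch, assuming nothing has been seeded into the static map (the host name is illustrative):

    import java.net.UnknownHostException;
    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.net.StaticMapping;

    public class ResolveValidHostsDemo {
      public static void main(String[] args) {
        StaticMapping mapping = new StaticMapping();
        try {
          // Nothing was added to the static map, so this must throw.
          List<String> racks =
              mapping.resolveValidHosts(Arrays.asList("unseeded.example"));
          System.out.println(racks);
        } catch (UnknownHostException e) {
          System.out.println("unknown host: " + e.getMessage());
        }
      }
    }
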
Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/net/TestScriptBasedMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/net/TestScriptBasedMapping.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/net/TestScriptBasedMapping.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/net/TestScriptBasedMapping.java Tue Jul 26 01:53:10 2011
@@ -19,6 +19,7 @@ package org.apache.hadoop.net;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.net.UnknownHostException;
 
 import org.apache.hadoop.conf.Configuration;
 
@@ -26,21 +27,59 @@ import junit.framework.TestCase;
 
 public class TestScriptBasedMapping extends TestCase {
 
-  public void testNoArgsMeansNoResult() {
-    ScriptBasedMapping mapping = new ScriptBasedMapping();
+  private ScriptBasedMapping mapping;
+  private Configuration conf;
+  private List<String> names;
+  
+  public TestScriptBasedMapping() {
+    mapping = new ScriptBasedMapping();
 
-    Configuration conf = new Configuration();
+    conf = new Configuration();
     conf.setInt(ScriptBasedMapping.SCRIPT_ARG_COUNT_KEY,
         ScriptBasedMapping.MIN_ALLOWABLE_ARGS - 1);
     conf.set(ScriptBasedMapping.SCRIPT_FILENAME_KEY, "any-filename");
 
-    mapping.setConf(conf);
+    mapping.setConf(conf);    
+  }
 
-    List<String> names = new ArrayList<String>();
+  public void testNoArgsMeansNoResult() {
+    names = new ArrayList<String>();
     names.add("some.machine.name");
     names.add("other.machine.name");
-
     List<String> result = mapping.resolve(names);
     assertNull(result);
   }
+  
+  public void testResolveValidInvalidHostException() {
+    names = new ArrayList<String>();
+    names.add("1.com"); // Add invalid hostname that doesn't resolve
+    boolean exceptionThrown = false;
+    try {
+      mapping.resolveValidHosts(names);
+    } catch (UnknownHostException e) {
+      exceptionThrown = true;
+    }
+    assertTrue(
+        "resolveValidHosts did not throw UnknownHostException for invalid host",
+        exceptionThrown);
+  }
+
+  public void testResolveValidHostNoException() {
+    conf.setInt(ScriptBasedMapping.SCRIPT_ARG_COUNT_KEY,
+        ScriptBasedMapping.MIN_ALLOWABLE_ARGS);
+    conf.set(ScriptBasedMapping.SCRIPT_FILENAME_KEY, "echo");
+    mapping.setConf(conf);    
+
+    names = new ArrayList<String>();
+    names.add("some.machine.name");
+    names.add("other.machine.name");
+    
+    boolean exceptionThrown = false;
+    try {
+      mapping.resolveValidHosts(names);
+    } catch (UnknownHostException e) {
+      exceptionThrown = true;
+    }
+    assertFalse("resolveValidHosts threw Exception for valid host", exceptionThrown);
+  }
 }

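Since this suite is still JUnit 3 (TestCase), the two new tests catch UnknownHostException by hand and assert on a boolean flag. The same idiom can be folded into a single helper; assertUnknownHostThrown below is an illustrative refactoring, not part of this commit.

    import static junit.framework.Assert.fail;

    import java.net.UnknownHostException;
    import java.util.List;
    import org.apache.hadoop.net.ScriptBasedMapping;

    public class ResolveAssertions {
      // Fails the test unless resolveValidHosts rejects the given names.
      static void assertUnknownHostThrown(ScriptBasedMapping mapping,
          List<String> names) {
        try {
          mapping.resolveValidHosts(names);
          fail("expected UnknownHostException for " + names);
        } catch (UnknownHostException expected) {
          // expected: an unresolvable host aborted the whole lookup
        }
      }
    }
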
Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/util/TestPureJavaCrc32.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/util/TestPureJavaCrc32.java?rev=1150969&r1=1150968&r2=1150969&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/util/TestPureJavaCrc32.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/util/TestPureJavaCrc32.java Tue Jul 26 01:53:10 2011
@@ -101,11 +101,10 @@ public class TestPureJavaCrc32 {
    * that java.util.zip.CRC32 uses.
    */
   public static class Table {
-    private static final int polynomial = 0xEDB88320;
-
     private final int[][] tables;
 
-    private Table(final int nBits, final int nTables) {
+    private Table(final int nBits, final int nTables,
+        long polynomial) {
       tables = new int[nTables][];
       final int size = 1 << nBits;
       for(int i = 0; i < tables.length; i++) {
@@ -169,10 +168,17 @@ public class TestPureJavaCrc32 {
 
     /** Generate CRC-32 lookup tables */
     public static void main(String[] args) throws FileNotFoundException {
+      if (args.length != 1) {
+        System.err.println("Usage: " + Table.class.getName() +
+            " <polynomial>");
+        System.exit(1);
+      }
+      long polynomial = Long.parseLong(args[0], 16);
+      
       int i = 8;
       final PrintStream out = new PrintStream(
           new FileOutputStream("table" + i + ".txt"), true);
-      final Table t = new Table(i, 16);
+      final Table t = new Table(i, 16, polynomial);
       final String s = t.toString();
       System.out.println(s);
       out.println(s);

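With the polynomial now a hex command-line argument, the generator above can emit tables for either checksum variant in this commit: EDB88320 is the bit-reversed CRC-32 polynomial that was previously hard-coded, and 82F63B78 is the bit-reversed CRC-32C (Castagnoli) polynomial. Below is a standalone sketch of the reflected table construction that the parametrized constructor performs for one 8-bit table; it illustrates the standard algorithm, and details of the real Table class may differ.

    public class CrcTableSketch {
      // Build one 256-entry reflected CRC table for the given (bit-reversed)
      // polynomial, e.g. 0xEDB88320L for CRC-32 or 0x82F63B78L for CRC-32C.
      static int[] makeTable(long polynomial) {
        int[] table = new int[256];
        for (int i = 0; i < 256; i++) {
          int crc = i;
          for (int bit = 0; bit < 8; bit++) {
            // Reflected form: shift right, xor in the polynomial on carry-out.
            crc = (crc >>> 1) ^ (((crc & 1) != 0) ? (int) polynomial : 0);
          }
          table[i] = crc;
        }
        return table;
      }
    }
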

