apex-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From t..@apache.org
Subject [01/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module
Date Wed, 18 May 2016 20:41:51 GMT
Repository: incubator-apex-malhar
Updated Branches:
  refs/heads/master 029291d47 -> 3ce83708f


http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/org/apache/hadoop/io/file/tfile/DTFileTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/DTFileTest.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/DTFileTest.java
index deac363..ef946ac 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/DTFileTest.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/DTFileTest.java
@@ -18,25 +18,28 @@
  */
 package org.apache.hadoop.io.file.tfile;
 
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Random;
+
 import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.BytesWritable;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Random;
 
 public class DTFileTest
 {
-   private static String ROOT =
+  private static String ROOT =
       System.getProperty("test.build.data", "target/tfile-test");
 
   private Configuration conf;
@@ -48,7 +51,8 @@ public class DTFileTest
   private KVGenerator kvGen;
 
 
-  static class TestConf {
+  static class TestConf
+  {
     public int minWordLen = 5;
     public int maxWordLen = 20;
     public int dictSize = 1000;
@@ -77,23 +81,18 @@ public class DTFileTest
     fs = path.getFileSystem(conf);
     timer = new NanoTimer(false);
     rng = new Random();
-    keyLenGen =
-        new RandomDistribution.Zipf(new Random(rng.nextLong()),
-            tconf.minKeyLen, tconf.maxKeyLen, 1.2);
-    RandomDistribution.DiscreteRNG valLenGen =
-        new RandomDistribution.Flat(new Random(rng.nextLong()),
-            tconf.minValLength, tconf.maxValLength);
-    RandomDistribution.DiscreteRNG wordLenGen =
-        new RandomDistribution.Flat(new Random(rng.nextLong()),
-            tconf.minWordLen, tconf.maxWordLen);
-    kvGen =
-        new KVGenerator(rng, true, keyLenGen, valLenGen, wordLenGen,
-            tconf.dictSize);
+    keyLenGen = new RandomDistribution.Zipf(new Random(rng.nextLong()), tconf.minKeyLen, tconf.maxKeyLen, 1.2);
+    RandomDistribution.DiscreteRNG valLenGen = new RandomDistribution.Flat(new Random(rng.nextLong()),
+        tconf.minValLength, tconf.maxValLength);
+    RandomDistribution.DiscreteRNG wordLenGen = new RandomDistribution.Flat(new Random(rng.nextLong()),
+        tconf.minWordLen, tconf.maxWordLen);
+    kvGen = new KVGenerator(rng, true, keyLenGen, valLenGen, wordLenGen,
+        tconf.dictSize);
   }
 
 
-  private static FSDataOutputStream createFSOutput(Path name, FileSystem fs)
-      throws IOException {
+  private static FSDataOutputStream createFSOutput(Path name, FileSystem fs) throws IOException
+  {
     if (fs.exists(name)) {
       fs.delete(name, true);
     }
@@ -110,9 +109,7 @@ public class DTFileTest
     byte[] key = new byte[16];
     ByteBuffer bb = ByteBuffer.wrap(key);
     try {
-      DTFile.Writer writer =
-          new DTFile.Writer(fout, tconf.minBlockSize, tconf.compress, "memcmp",
-              conf);
+      DTFile.Writer writer = new DTFile.Writer(fout, tconf.minBlockSize, tconf.compress, "memcmp", conf);
       try {
         BytesWritable tmpKey = new BytesWritable();
         BytesWritable val = new BytesWritable();
@@ -129,18 +126,16 @@ public class DTFileTest
               .getSize());
           tuples++;
         }
-      }
-      finally {
+      } finally {
         writer.close();
       }
-    }
-    finally {
+    } finally {
       fout.close();
     }
 
     long fsize = fs.getFileStatus(path).getLen();
 
-    System.out.println("Total tuple wrote " + tuples + " File size " + fsize / (1024.0 * 1024));
+    LOG.debug("Total tuple wrote {} File size {}", tuples, fsize / (1024.0 * 1024));
   }
 
 
@@ -180,13 +175,13 @@ public class DTFileTest
     long hit = CacheManager.getCache().stats().hitCount();
     scanner.lowerBound(key);
     Assert.assertEquals("Cache contains some blocks ", CacheManager.getCacheSize(), numBlocks);
-    Assert.assertEquals("Cache hit ", CacheManager.getCache().stats().hitCount(), hit+1);
+    Assert.assertEquals("Cache hit ", CacheManager.getCache().stats().hitCount(), hit + 1);
 
     /* test cache miss */
     scanner.close();
     hit = CacheManager.getCache().stats().hitCount();
     long oldmiss = CacheManager.getCache().stats().missCount();
-    ikey = tuples-1;
+    ikey = tuples - 1;
     bb.clear();
     bb.putLong(ikey);
     numBlocks = CacheManager.getCacheSize();
@@ -219,4 +214,6 @@ public class DTFileTest
     writeTFile();
   }
 
+  private static final Logger LOG = LoggerFactory.getLogger(DTFileTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java
index 12857d7..f92d9aa 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java
@@ -23,22 +23,23 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Arrays;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.file.tfile.DTFile.Reader;
-import org.apache.hadoop.io.file.tfile.DTFile.Writer;
 import org.apache.hadoop.io.file.tfile.DTFile.Reader.Scanner;
+import org.apache.hadoop.io.file.tfile.DTFile.Writer;
+
+import junit.framework.TestCase;
 
 /**
  * test tfile features.
  * 
  */
-public class TestDTFile extends TestCase {
+public class TestDTFile extends TestCase
+{
   private static String ROOT =
       System.getProperty("test.build.data", "target/tfile-test");
   private FileSystem fs;
@@ -48,18 +49,21 @@ public class TestDTFile extends TestCase {
   private static final String localFormatter = "%010d";
 
   @Override
-  public void setUp() throws IOException {
+  public void setUp() throws IOException
+  {
     conf = new Configuration();
     fs = FileSystem.get(conf);
   }
 
   @Override
-  public void tearDown() throws IOException {
+  public void tearDown() throws IOException
+  {
     // do nothing
   }
 
   // read a key from the scanner
-  public byte[] readKey(Scanner scanner) throws IOException {
+  public byte[] readKey(Scanner scanner) throws IOException
+  {
     int keylen = scanner.entry().getKeyLength();
     byte[] read = new byte[keylen];
     scanner.entry().getKey(read);
@@ -67,7 +71,8 @@ public class TestDTFile extends TestCase {
   }
 
   // read a value from the scanner
-  public byte[] readValue(Scanner scanner) throws IOException {
+  public byte[] readValue(Scanner scanner) throws IOException
+  {
     int valueLen = scanner.entry().getValueLength();
     byte[] read = new byte[valueLen];
     scanner.entry().getValue(read);
@@ -75,7 +80,8 @@ public class TestDTFile extends TestCase {
   }
 
   // read a long value from the scanner
-  public byte[] readLongValue(Scanner scanner, int len) throws IOException {
+  public byte[] readLongValue(Scanner scanner, int len) throws IOException
+  {
     DataInputStream din = scanner.entry().getValueStream();
     byte[] b = new byte[len];
     din.readFully(b);
@@ -86,7 +92,8 @@ public class TestDTFile extends TestCase {
   // write some records into the tfile
   // write them twice
   private int writeSomeRecords(Writer writer, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     String value = "value";
     for (int i = start; i < (start + n); i++) {
       String key = String.format(localFormatter, i);
@@ -98,7 +105,8 @@ public class TestDTFile extends TestCase {
 
   // read the records and check
   private int readAndCheckbytes(Scanner scanner, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     String value = "value";
     for (int i = start; i < (start + n); i++) {
       byte[] key = readKey(scanner);
@@ -125,7 +133,8 @@ public class TestDTFile extends TestCase {
   // write some large records
   // write them twice
   private int writeLargeRecords(Writer writer, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     byte[] value = new byte[largeVal];
     for (int i = start; i < (start + n); i++) {
       String key = String.format(localFormatter, i);
@@ -138,7 +147,8 @@ public class TestDTFile extends TestCase {
   // read large records
   // read them twice since its duplicated
   private int readLargeRecords(Scanner scanner, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     for (int i = start; i < (start + n); i++) {
       byte[] key = readKey(scanner);
       String keyStr = String.format(localFormatter, i);
@@ -154,7 +164,8 @@ public class TestDTFile extends TestCase {
   }
 
   // write empty keys and values
-  private void writeEmptyRecords(Writer writer, int n) throws IOException {
+  private void writeEmptyRecords(Writer writer, int n) throws IOException
+  {
     byte[] key = new byte[0];
     byte[] value = new byte[0];
     for (int i = 0; i < n; i++) {
@@ -163,7 +174,8 @@ public class TestDTFile extends TestCase {
   }
 
   // read empty keys and values
-  private void readEmptyRecords(Scanner scanner, int n) throws IOException {
+  private void readEmptyRecords(Scanner scanner, int n) throws IOException
+  {
     byte[] key = new byte[0];
     byte[] value = new byte[0];
     byte[] readKey = null;
@@ -178,7 +190,8 @@ public class TestDTFile extends TestCase {
   }
 
   private int writePrepWithKnownLength(Writer writer, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     // get the length of the key
     String key = String.format(localFormatter, start);
     int keyLen = key.getBytes().length;
@@ -198,7 +211,8 @@ public class TestDTFile extends TestCase {
   }
 
   private int readPrepWithKnownLength(Scanner scanner, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     for (int i = start; i < (start + n); i++) {
       String key = String.format(localFormatter, i);
       byte[] read = readKey(scanner);
@@ -212,7 +226,8 @@ public class TestDTFile extends TestCase {
   }
 
   private int writePrepWithUnkownLength(Writer writer, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     for (int i = start; i < (start + n); i++) {
       DataOutputStream out = writer.prepareAppendKey(-1);
       String localKey = String.format(localFormatter, i);
@@ -227,7 +242,8 @@ public class TestDTFile extends TestCase {
   }
 
   private int readPrepWithUnknownLength(Scanner scanner, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     for (int i = start; i < start; i++) {
       String key = String.format(localFormatter, i);
       byte[] read = readKey(scanner);
@@ -235,8 +251,7 @@ public class TestDTFile extends TestCase {
       try {
         read = readValue(scanner);
         assertTrue(false);
-      }
-      catch (IOException ie) {
+      } catch (IOException ie) {
         // should have thrown exception
       }
       String value = "value" + key;
@@ -247,11 +262,13 @@ public class TestDTFile extends TestCase {
     return (start + n);
   }
 
-  private byte[] getSomeKey(int rowId) {
+  private byte[] getSomeKey(int rowId)
+  {
     return String.format(localFormatter, rowId).getBytes();
   }
 
-  private void writeRecords(Writer writer) throws IOException {
+  private void writeRecords(Writer writer) throws IOException
+  {
     writeEmptyRecords(writer, 10);
     int ret = writeSomeRecords(writer, 0, 100);
     ret = writeLargeRecords(writer, ret, 1);
@@ -260,7 +277,8 @@ public class TestDTFile extends TestCase {
     writer.close();
   }
 
-  private void readAllRecords(Scanner scanner) throws IOException {
+  private void readAllRecords(Scanner scanner) throws IOException
+  {
     readEmptyRecords(scanner, 10);
     int ret = readAndCheckbytes(scanner, 0, 100);
     ret = readLargeRecords(scanner, ret, 1);
@@ -268,8 +286,11 @@ public class TestDTFile extends TestCase {
     ret = readPrepWithUnknownLength(scanner, ret, 50);
   }
 
-  private FSDataOutputStream createFSOutput(Path name) throws IOException {
-    if (fs.exists(name)) fs.delete(name, true);
+  private FSDataOutputStream createFSOutput(Path name) throws IOException
+  {
+    if (fs.exists(name)) {
+      fs.delete(name, true);
+    }
     FSDataOutputStream fout = fs.create(name);
     return fout;
   }
@@ -277,7 +298,8 @@ public class TestDTFile extends TestCase {
   /**
    * test none codecs
    */
-  void basicWithSomeCodec(String codec) throws IOException {
+  void basicWithSomeCodec(String codec) throws IOException
+  {
     Path ncTFile = new Path(ROOT, "basic.tfile");
     FSDataOutputStream fout = createFSOutput(ncTFile);
     Writer writer = new Writer(fout, minBlockSize, codec, "memcmp", conf);
@@ -330,7 +352,8 @@ public class TestDTFile extends TestCase {
   }
 
   // unsorted with some codec
-  void unsortedWithSomeCodec(String codec) throws IOException {
+  void unsortedWithSomeCodec(String codec) throws IOException
+  {
     Path uTfile = new Path(ROOT, "unsorted.tfile");
     FSDataOutputStream fout = createFSOutput(uTfile);
     Writer writer = new Writer(fout, minBlockSize, codec, null, conf);
@@ -349,19 +372,22 @@ public class TestDTFile extends TestCase {
     fs.delete(uTfile, true);
   }
 
-  public void testTFileFeatures() throws IOException {
+  public void testTFileFeatures() throws IOException
+  {
     basicWithSomeCodec("none");
     basicWithSomeCodec("gz");
   }
 
   // test unsorted t files.
-  public void testUnsortedTFileFeatures() throws IOException {
+  public void testUnsortedTFileFeatures() throws IOException
+  {
     unsortedWithSomeCodec("none");
     unsortedWithSomeCodec("gz");
   }
 
   private void writeNumMetablocks(Writer writer, String compression, int n)
-      throws IOException {
+      throws IOException
+  {
     for (int i = 0; i < n; i++) {
       DataOutputStream dout =
           writer.prepareMetaBlock("TfileMeta" + i, compression);
@@ -372,25 +398,26 @@ public class TestDTFile extends TestCase {
   }
 
   private void someTestingWithMetaBlock(Writer writer, String compression)
-      throws IOException {
+      throws IOException
+  {
     DataOutputStream dout = null;
     writeNumMetablocks(writer, compression, 10);
     try {
       dout = writer.prepareMetaBlock("TfileMeta1", compression);
       assertTrue(false);
-    }
-    catch (MetaBlockAlreadyExists me) {
+    } catch (MetaBlockAlreadyExists me) {
       // avoid this exception
     }
     dout = writer.prepareMetaBlock("TFileMeta100", compression);
     dout.close();
   }
 
-  private void readNumMetablocks(Reader reader, int n) throws IOException {
+  private void readNumMetablocks(Reader reader, int n) throws IOException
+  {
     int len = ("something to test" + 0).getBytes().length;
     for (int i = 0; i < n; i++) {
       DataInputStream din = reader.getMetaBlock("TfileMeta" + i);
-      byte b[] = new byte[len];
+      byte[] b = new byte[len];
       din.readFully(b);
       assertTrue("faield to match metadata", Arrays.equals(
           ("something to test" + i).getBytes(), b));
@@ -398,14 +425,14 @@ public class TestDTFile extends TestCase {
     }
   }
 
-  private void someReadingWithMetaBlock(Reader reader) throws IOException {
+  private void someReadingWithMetaBlock(Reader reader) throws IOException
+  {
     DataInputStream din = null;
     readNumMetablocks(reader, 10);
     try {
       din = reader.getMetaBlock("NO ONE");
       assertTrue(false);
-    }
-    catch (MetaBlockDoesNotExist me) {
+    } catch (MetaBlockDoesNotExist me) {
       // should catch
     }
     din = reader.getMetaBlock("TFileMeta100");
@@ -415,7 +442,8 @@ public class TestDTFile extends TestCase {
   }
 
   // test meta blocks for tfiles
-  public void _testMetaBlocks() throws IOException {
+  public void _testMetaBlocks() throws IOException
+  {
     Path mFile = new Path(ROOT, "meta.tfile");
     FSDataOutputStream fout = createFSOutput(mFile);
     Writer writer = new Writer(fout, minBlockSize, "none", null, conf);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java
index 071d752..f3479de 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java
@@ -24,6 +24,11 @@ import java.io.EOFException;
 import java.io.IOException;
 import java.util.Random;
 
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -31,26 +36,23 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
 import org.apache.hadoop.io.file.tfile.DTFile.Reader;
-import org.apache.hadoop.io.file.tfile.DTFile.Writer;
 import org.apache.hadoop.io.file.tfile.DTFile.Reader.Location;
 import org.apache.hadoop.io.file.tfile.DTFile.Reader.Scanner;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.apache.hadoop.io.file.tfile.DTFile.Writer;
 
 /**
- * 
+ *
  * Byte arrays test case class using GZ compression codec, base class of none
  * and LZO compression classes.
- * 
+ *
  */
-public class TestDTFileByteArrays {
+public class TestDTFileByteArrays
+{
   private static String ROOT =
       System.getProperty("test.build.data", "target/tfile-test");
-  private final static int BLOCK_SIZE = 512;
-  private final static int BUF_SIZE = 64;
-  private final static int K = 1024;
+  private static final int BLOCK_SIZE = 512;
+  private static final int BUF_SIZE = 64;
+  private static final int K = 1024;
   protected boolean skip = false;
 
   private static final String KEY = "key";
@@ -76,19 +78,22 @@ public class TestDTFileByteArrays {
   private int records2ndBlock = usingNative ? 5574 : 4263;
 
   public void init(String compression, String comparator,
-      int numRecords1stBlock, int numRecords2ndBlock) {
+      int numRecords1stBlock, int numRecords2ndBlock)
+  {
     init(compression, comparator);
     this.records1stBlock = numRecords1stBlock;
     this.records2ndBlock = numRecords2ndBlock;
   }
   
-  public void init(String compression, String comparator) {
+  public void init(String compression, String comparator)
+  {
     this.compression = compression;
     this.comparator = comparator;
   }
 
   @Before
-  public void setUp() throws IOException {
+  public void setUp() throws IOException
+  {
     path = new Path(ROOT, outputFile);
     fs = path.getFileSystem(conf);
     out = fs.create(path);
@@ -96,15 +101,19 @@ public class TestDTFileByteArrays {
   }
 
   @After
-  public void tearDown() throws IOException {
-    if (!skip)
+  public void tearDown() throws IOException
+  {
+    if (!skip) {
       fs.delete(path, true);
+    }
   }
 
   @Test
-  public void testNoDataEntry() throws IOException {
-    if (skip) 
+  public void testNoDataEntry() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
 
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
@@ -116,9 +125,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testOneDataEntry() throws IOException {
-    if (skip)
+  public void testOneDataEntry() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(1);
     readRecords(1);
 
@@ -130,22 +141,26 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testTwoDataEntries() throws IOException {
-    if (skip)
+  public void testTwoDataEntries() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(2);
     readRecords(2);
   }
 
   /**
    * Fill up exactly one block.
-   * 
+   *
    * @throws IOException
    */
   @Test
-  public void testOneBlock() throws IOException {
-    if (skip)
+  public void testOneBlock() throws IOException
+  {
+    if (skip) {
       return;
+    }
     // just under one block
     writeRecords(records1stBlock);
     readRecords(records1stBlock);
@@ -155,13 +170,15 @@ public class TestDTFileByteArrays {
 
   /**
    * One block plus one record.
-   * 
+   *
    * @throws IOException
    */
   @Test
-  public void testOneBlockPlusOneEntry() throws IOException {
-    if (skip)
+  public void testOneBlockPlusOneEntry() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(records1stBlock + 1);
     readRecords(records1stBlock + 1);
     checkBlockIndex(records1stBlock - 1, 0);
@@ -169,18 +186,22 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testTwoBlocks() throws IOException {
-    if (skip)
+  public void testTwoBlocks() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(records1stBlock + 5);
     readRecords(records1stBlock + 5);
     checkBlockIndex(records1stBlock + 4, 1);
   }
 
   @Test
-  public void testThreeBlocks() throws IOException {
-    if (skip) 
+  public void testThreeBlocks() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(2 * records1stBlock + 5);
     readRecords(2 * records1stBlock + 5);
 
@@ -224,17 +245,20 @@ public class TestDTFileByteArrays {
     readKeyManyTimes(records1stBlock + 10);
   }
 
-  Location locate(Scanner scanner, byte[] key) throws IOException {
-    if (scanner.seekTo(key) == true) {
+  Location locate(Scanner scanner, byte[] key) throws IOException
+  {
+    if (scanner.seekTo(key)) {
       return scanner.currentLocation;
     }
     return scanner.endLocation;
   }
   
   @Test
-  public void testLocate() throws IOException {
-    if (skip)
+  public void testLocate() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(3 * records1stBlock);
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
     Scanner scanner = reader.createScanner();
@@ -248,9 +272,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureWriterNotClosed() throws IOException {
-    if (skip)
+  public void testFailureWriterNotClosed() throws IOException
+  {
+    if (skip) {
       return;
+    }
     Reader reader = null;
     try {
       reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
@@ -265,9 +291,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureWriteMetaBlocksWithSameName() throws IOException {
-    if (skip)
+  public void testFailureWriteMetaBlocksWithSameName() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writer.append("keyX".getBytes(), "valueX".getBytes());
 
     // create a new metablock
@@ -287,9 +315,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureGetNonExistentMetaBlock() throws IOException {
-    if (skip)
+  public void testFailureGetNonExistentMetaBlock() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writer.append("keyX".getBytes(), "valueX".getBytes());
 
     // create a new metablock
@@ -314,9 +344,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureWriteRecordAfterMetaBlock() throws IOException {
-    if (skip)
+  public void testFailureWriteRecordAfterMetaBlock() throws IOException
+  {
+    if (skip) {
       return;
+    }
     // write a key/value first
     writer.append("keyX".getBytes(), "valueX".getBytes());
     // create a new metablock
@@ -336,9 +368,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureReadValueManyTimes() throws IOException {
-    if (skip)
+  public void testFailureReadValueManyTimes() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(5);
 
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
@@ -360,9 +394,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureBadCompressionCodec() throws IOException {
-    if (skip)
+  public void testFailureBadCompressionCodec() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
     out = fs.create(path);
     try {
@@ -375,9 +411,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureOpenEmptyFile() throws IOException {
-    if (skip)
+  public void testFailureOpenEmptyFile() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
     // create an absolutely empty file
     path = new Path(fs.getWorkingDirectory(), outputFile);
@@ -392,9 +430,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureOpenRandomFile() throws IOException {
-    if (skip)
+  public void testFailureOpenRandomFile() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
     // create an random file
     path = new Path(fs.getWorkingDirectory(), outputFile);
@@ -416,9 +456,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureKeyLongerThan64K() throws IOException {
-    if (skip)
+  public void testFailureKeyLongerThan64K() throws IOException
+  {
+    if (skip) {
       return;
+    }
     byte[] buf = new byte[64 * K + 1];
     Random rand = new Random();
     rand.nextBytes(buf);
@@ -431,9 +473,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureOutOfOrderKeys() throws IOException {
-    if (skip)
+  public void testFailureOutOfOrderKeys() throws IOException
+  {
+    if (skip) {
       return;
+    }
     try {
       writer.append("keyM".getBytes(), "valueM".getBytes());
       writer.append("keyA".getBytes(), "valueA".getBytes());
@@ -447,9 +491,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureNegativeOffset() throws IOException {
-    if (skip)
+  public void testFailureNegativeOffset() throws IOException
+  {
+    if (skip) {
       return;
+    }
     try {
       writer.append("keyX".getBytes(), -1, 4, "valueX".getBytes(), 0, 6);
       Assert.fail("Error on handling negative offset.");
@@ -460,9 +506,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureNegativeOffset_2() throws IOException {
-    if (skip)
+  public void testFailureNegativeOffset_2() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
 
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
@@ -480,9 +528,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureNegativeLength() throws IOException {
-    if (skip)
+  public void testFailureNegativeLength() throws IOException
+  {
+    if (skip) {
       return;
+    }
     try {
       writer.append("keyX".getBytes(), 0, -1, "valueX".getBytes(), 0, 6);
       Assert.fail("Error on handling negative length.");
@@ -493,9 +543,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureNegativeLength_2() throws IOException {
-    if (skip)
+  public void testFailureNegativeLength_2() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
 
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
@@ -513,9 +565,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureNegativeLength_3() throws IOException {
-    if (skip)
+  public void testFailureNegativeLength_3() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(3);
 
     Reader reader =
@@ -544,9 +598,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureCompressionNotWorking() throws IOException {
-    if (skip)
+  public void testFailureCompressionNotWorking() throws IOException
+  {
+    if (skip) {
       return;
+    }
     long rawDataSize = writeRecords(10 * records1stBlock, false);
     if (!compression.equalsIgnoreCase(Compression.Algorithm.NONE.getName())) {
       Assert.assertTrue(out.getPos() < rawDataSize);
@@ -555,9 +611,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureFileWriteNotAt0Position() throws IOException {
-    if (skip)
+  public void testFailureFileWriteNotAt0Position() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
     out = fs.create(path);
     out.write(123);
@@ -571,11 +629,13 @@ public class TestDTFileByteArrays {
     closeOutput();
   }
 
-  private long writeRecords(int count) throws IOException {
+  private long writeRecords(int count) throws IOException
+  {
     return writeRecords(count, true);
   }
 
-  private long writeRecords(int count, boolean close) throws IOException {
+  private long writeRecords(int count, boolean close) throws IOException
+  {
     long rawDataSize = writeRecords(writer, count);
     if (close) {
       closeOutput();
@@ -583,7 +643,8 @@ public class TestDTFileByteArrays {
     return rawDataSize;
   }
 
-  static long writeRecords(Writer writer, int count) throws IOException {
+  static long writeRecords(Writer writer, int count) throws IOException
+  {
     long rawDataSize = 0;
     int nx;
     for (nx = 0; nx < count; nx++) {
@@ -599,21 +660,24 @@ public class TestDTFileByteArrays {
 
   /**
    * Insert some leading 0's in front of the value, to make the keys sorted.
-   * 
+   *
    * @param prefix prefix
    * @param value  value
    * @return sorted key
    */
-  static String composeSortedKey(String prefix, int value) {
+  static String composeSortedKey(String prefix, int value)
+  {
     return String.format("%s%010d", prefix, value);
   }
 
-  private void readRecords(int count) throws IOException {
+  private void readRecords(int count) throws IOException
+  {
     readRecords(fs, path, count, conf);
   }
 
   static void readRecords(FileSystem fs, Path path, int count,
-      Configuration conf) throws IOException {
+      Configuration conf) throws IOException
+  {
     Reader reader =
         new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
     Scanner scanner = reader.createScanner();
@@ -643,7 +707,8 @@ public class TestDTFileByteArrays {
     }
   }
 
-  private void checkBlockIndex(int recordIndex, int blockIndexExpected) throws IOException {
+  private void checkBlockIndex(int recordIndex, int blockIndexExpected) throws IOException
+  {
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
     Scanner scanner = reader.createScanner();
     scanner.seekTo(composeSortedKey(KEY, recordIndex).getBytes());
@@ -654,12 +719,11 @@ public class TestDTFileByteArrays {
   }
 
   private void readValueBeforeKey(int recordIndex)
-      throws IOException {
+      throws IOException
+  {
     Reader reader =
         new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
-    Scanner scanner =
-        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
-            .getBytes(), null);
+    Scanner scanner = reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
 
     try {
       byte[] vbuf = new byte[BUF_SIZE];
@@ -679,11 +743,10 @@ public class TestDTFileByteArrays {
   }
 
   private void readKeyWithoutValue(int recordIndex)
-      throws IOException {
+      throws IOException
+  {
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
-    Scanner scanner =
-        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
-            .getBytes(), null);
+    Scanner scanner = reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
 
     try {
       // read the indexed key
@@ -708,12 +771,11 @@ public class TestDTFileByteArrays {
   }
 
   private void readValueWithoutKey(int recordIndex)
-      throws IOException {
+      throws IOException
+  {
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
 
-    Scanner scanner =
-        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
-            .getBytes(), null);
+    Scanner scanner = reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
 
     byte[] vbuf1 = new byte[BUF_SIZE];
     int vlen1 = scanner.entry().getValueLength();
@@ -724,20 +786,18 @@ public class TestDTFileByteArrays {
       byte[] vbuf2 = new byte[BUF_SIZE];
       int vlen2 = scanner.entry().getValueLength();
       scanner.entry().getValue(vbuf2);
-      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
-          + (recordIndex + 1));
+      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE + (recordIndex + 1));
     }
 
     scanner.close();
     reader.close();
   }
 
-  private void readKeyManyTimes(int recordIndex) throws IOException {
+  private void readKeyManyTimes(int recordIndex) throws IOException
+  {
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
 
-    Scanner scanner =
-        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
-            .getBytes(), null);
+    Scanner scanner = reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
 
     // read the indexed key
     byte[] kbuf1 = new byte[BUF_SIZE];
@@ -760,7 +820,8 @@ public class TestDTFileByteArrays {
     reader.close();
   }
 
-  private void closeOutput() throws IOException {
+  private void closeOutput() throws IOException
+  {
     if (writer != null) {
       writer.close();
       writer = null;


Mime
View raw message