hbase-commits mailing list archives

From: j...@apache.org
Subject: [hbase] branch branch-2.2 updated: HBASE-23635 Reduced number of Checkstyle violations in hbase-mapreduce
Date: Sat, 04 Jan 2020 00:28:10 GMT
This is an automated email from the ASF dual-hosted git repository.

janh pushed a commit to branch branch-2.2
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2.2 by this push:
     new dbb74a5  HBASE-23635 Reduced number of Checkstyle violations in hbase-mapreduce
dbb74a5 is described below

commit dbb74a5526181185d0c1e38e7794f8172c527ebb
Author: Jan Hentschel <janh@apache.org>
AuthorDate: Sat Jan 4 00:48:24 2020 +0100

    HBASE-23635 Reduced number of Checkstyle violations in hbase-mapreduce
    
    Signed-off-by: Viraj Jasani <vjasani@apache.org>
    Signed-off-by: stack <stack@apache.org>
---
 .../hadoop/hbase/TestPerformanceEvaluation.java    | 30 +++++-----
 .../hadoop/hbase/mapreduce/NMapInputFormat.java    | 21 ++++---
 .../TableSnapshotInputFormatTestBase.java          | 18 +++---
 .../hadoop/hbase/mapreduce/TestCellCounter.java    | 65 +++++++++-------------
 .../hadoop/hbase/mapreduce/TestHashTable.java      | 16 +++---
 .../hbase/mapreduce/TestImportTsvParser.java       | 34 ++++++-----
 .../hadoop/hbase/mapreduce/TestSyncTable.java      | 34 +++++------
 .../hadoop/hbase/mapreduce/TestTableSplit.java     | 18 +++---
 .../mapreduce/TsvImporterCustomTestMapper.java     | 15 +++--
 .../hbase/PerformanceEvaluation_Counter.properties |  4 +-
 10 files changed, 110 insertions(+), 145 deletions(-)
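
The patch below is a mechanical Checkstyle cleanup rather than a behavioral change. The recurring fixes are: uppercase long-literal suffixes (UpperEll), dedicated JUnit assertions instead of assertTrue with negation or ==, try-with-resources instead of try/finally with close(), braces on single-statement branches (NeedBraces), and removal of checked exceptions that are never thrown. For orientation, here is a condensed, illustrative sketch of those patterns; it assumes JUnit 4 on the classpath and is not code from this commit:

  import static org.junit.Assert.assertEquals;
  import static org.junit.Assert.assertFalse;

  import java.io.ByteArrayInputStream;
  import java.io.IOException;
  import java.io.InputStream;

  public class CheckstyleFixPatterns {
    public void demonstrate() throws IOException {
      // UpperEll: an uppercase 'L' suffix cannot be misread as the digit '1'.
      long bufferSize = 2L * 1024L * 1024L;
      assertEquals(2L * 1024L * 1024L, bufferSize);

      // Prefer the dedicated assertion over assertTrue(!x) or assertTrue(a == b).
      boolean autoFlush = false;
      assertFalse(autoFlush);
      assertEquals(4, 2 + 2);

      // Try-with-resources closes the stream even on failure, replacing try/finally.
      try (InputStream in = new ByteArrayInputStream(new byte[] { 42 })) {
        assertEquals(42, in.read());
      }

      // NeedBraces: even a single-statement branch gets its own block.
      if (autoFlush) {
        return;
      }
    }
  }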

diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
index 6133c9d..1940725 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -53,7 +54,6 @@ import org.apache.hbase.thirdparty.com.google.gson.Gson;
 
 @Category({MiscTests.class, SmallTests.class})
 public class TestPerformanceEvaluation {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestPerformanceEvaluation.class);
@@ -64,19 +64,19 @@ public class TestPerformanceEvaluation {
   public void testDefaultInMemoryCompaction() {
     PerformanceEvaluation.TestOptions defaultOpts =
         new PerformanceEvaluation.TestOptions();
-    assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT.toString(),
+    assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT,
         defaultOpts.getInMemoryCompaction().toString());
     HTableDescriptor htd = PerformanceEvaluation.getTableDescriptor(defaultOpts);
     for (HColumnDescriptor hcd: htd.getFamilies()) {
-      assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT.toString(),
+      assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT,
           hcd.getInMemoryCompaction().toString());
     }
   }
 
   @Test
-  public void testSerialization() throws IOException {
+  public void testSerialization() {
     PerformanceEvaluation.TestOptions options = new PerformanceEvaluation.TestOptions();
-    assertTrue(!options.isAutoFlush());
+    assertFalse(options.isAutoFlush());
     options.setAutoFlush(true);
     Gson gson = GsonUtil.createGson().create();
     String optionsString = gson.toJson(options);
@@ -101,8 +101,7 @@ public class TestPerformanceEvaluation {
     long len = fs.getFileStatus(p).getLen();
     assertTrue(len > 0);
     byte[] content = new byte[(int) len];
-    FSDataInputStream dis = fs.open(p);
-    try {
+    try (FSDataInputStream dis = fs.open(p)) {
       dis.readFully(content);
       BufferedReader br = new BufferedReader(
         new InputStreamReader(new ByteArrayInputStream(content), StandardCharsets.UTF_8));
@@ -111,8 +110,6 @@ public class TestPerformanceEvaluation {
         count++;
       }
       assertEquals(clients, count);
-    } finally {
-      dis.close();
     }
   }
 
@@ -170,9 +167,8 @@ public class TestPerformanceEvaluation {
   }
 
   @Test
-  public void testZipfian()
-  throws NoSuchMethodException, SecurityException, InstantiationException, IllegalAccessException,
-      IllegalArgumentException, InvocationTargetException {
+  public void testZipfian() throws NoSuchMethodException, SecurityException, InstantiationException,
+      IllegalAccessException, IllegalArgumentException, InvocationTargetException {
     TestOptions opts = new PerformanceEvaluation.TestOptions();
     opts.setValueZipf(true);
     final int valueSize = 1024;
@@ -197,10 +193,10 @@ public class TestPerformanceEvaluation {
   public void testSetBufferSizeOption() {
     TestOptions opts = new PerformanceEvaluation.TestOptions();
     long bufferSize = opts.getBufferSize();
-    assertEquals(bufferSize, 2l * 1024l * 1024l);
-    opts.setBufferSize(64l * 1024l);
+    assertEquals(bufferSize, 2L * 1024L * 1024L);
+    opts.setBufferSize(64L * 1024L);
     bufferSize = opts.getBufferSize();
-    assertEquals(bufferSize, 64l * 1024l);
+    assertEquals(bufferSize, 64L * 1024L);
   }
 
   @Test
@@ -265,7 +261,7 @@ public class TestPerformanceEvaluation {
     assertNotNull(options);
     assertNotNull(options.getCmdName());
     assertEquals(cmdName, options.getCmdName());
-    assertTrue(options.getMultiPut() == 10);
+    assertEquals(10, options.getMultiPut());
   }
 
   @Test
@@ -288,6 +284,6 @@ public class TestPerformanceEvaluation {
     assertNotNull(options);
     assertNotNull(options.getCmdName());
     assertEquals(cmdName, options.getCmdName());
-    assertTrue(options.getConnCount() == 10);
+    assertEquals(10, options.getConnCount());
   }
 }
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java
index 3203f0c..e022bfd 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.DataInput;
 import java.io.DataOutput;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -44,14 +43,12 @@ public class NMapInputFormat extends InputFormat<NullWritable, NullWritable> {
 
   @Override
   public RecordReader<NullWritable, NullWritable> createRecordReader(
-      InputSplit split,
-      TaskAttemptContext tac) throws IOException, InterruptedException {
+      InputSplit split, TaskAttemptContext tac) {
     return new SingleRecordReader<>(NullWritable.get(), NullWritable.get());
   }
 
   @Override
-  public List<InputSplit> getSplits(JobContext context) throws IOException,
-      InterruptedException {
+  public List<InputSplit> getSplits(JobContext context) {
     int count = getNumMapTasks(context.getConfiguration());
     List<InputSplit> splits = new ArrayList<>(count);
     for (int i = 0; i < count; i++) {
@@ -70,21 +67,21 @@ public class NMapInputFormat extends InputFormat<NullWritable, NullWritable> {
 
   private static class NullInputSplit extends InputSplit implements Writable {
     @Override
-    public long getLength() throws IOException, InterruptedException {
+    public long getLength() {
       return 0;
     }
 
     @Override
-    public String[] getLocations() throws IOException, InterruptedException {
+    public String[] getLocations() {
       return new String[] {};
     }
 
     @Override
-    public void readFields(DataInput in) throws IOException {
+    public void readFields(DataInput in) {
     }
 
     @Override
-    public void write(DataOutput out) throws IOException {
+    public void write(DataOutput out) {
     }
   }
 
@@ -125,10 +122,12 @@ public class NMapInputFormat extends InputFormat<NullWritable, NullWritable> {
 
     @Override
     public boolean nextKeyValue() {
-      if (providedKey) return false;
+      if (providedKey) {
+        return false;
+      }
+
       providedKey = true;
       return true;
     }
-
   }
 }
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
index 5e7ea7a..95250d2 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatTestBase.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.mapreduce;
 
 import static org.junit.Assert.assertFalse;
@@ -80,8 +79,8 @@ public abstract class TableSnapshotInputFormatTestBase {
     throws Exception;
 
   protected abstract void testWithMapReduceImpl(HBaseTestingUtility util, TableName tableName,
-    String snapshotName, Path tableDir, int numRegions, int numSplitsPerRegion, int expectedNumSplits,
-    boolean shutdownCluster) throws Exception;
+    String snapshotName, Path tableDir, int numRegions, int numSplitsPerRegion,
+    int expectedNumSplits, boolean shutdownCluster) throws Exception;
 
   protected abstract byte[] getStartRow();
 
@@ -158,7 +157,8 @@ public abstract class TableSnapshotInputFormatTestBase {
       String snapshotName, Path tmpTableDir) throws Exception;
 
   protected void testWithMapReduce(HBaseTestingUtility util, String snapshotName,
-      int numRegions, int numSplitsPerRegion, int expectedNumSplits, boolean shutdownCluster) throws Exception {
+      int numRegions, int numSplitsPerRegion, int expectedNumSplits, boolean shutdownCluster)
+      throws Exception {
     setupCluster();
     try {
       Path tableDir = util.getDataTestDirOnTestFS(snapshotName);
@@ -182,10 +182,11 @@ public abstract class TableSnapshotInputFormatTestBase {
         cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
     }
 
-    for (int j = 0; j < FAMILIES.length; j++) {
-      byte[] actual = result.getValue(FAMILIES[j], FAMILIES[j]);
-      Assert.assertArrayEquals("Row in snapshot does not match, expected:" + Bytes.toString(row)
-        + " ,actual:" + Bytes.toString(actual), row, actual);
+    for (byte[] family : FAMILIES) {
+      byte[] actual = result.getValue(family, family);
+      Assert.assertArrayEquals(
+        "Row in snapshot does not match, expected:" + Bytes.toString(row) + " ,actual:" +
Bytes
+          .toString(actual), row, actual);
     }
   }
 
@@ -226,5 +227,4 @@ public abstract class TableSnapshotInputFormatTestBase {
     admin.flush(tableName);
     table.close();
   }
-
 }
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
index 7930c9f..794a440 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
@@ -51,7 +51,6 @@ import org.junit.rules.TestName;
 
 @Category({MapReduceTests.class, LargeTests.class})
 public class TestCellCounter {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestCellCounter.class);
@@ -93,8 +92,7 @@ public class TestCellCounter {
   public void testCellCounter() throws Exception {
     final TableName sourceTable = TableName.valueOf(name.getMethodName());
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(sourceTable, families);
-    try{
+    try (Table t = UTIL.createTable(sourceTable, families)) {
       Put p = new Put(ROW1);
       p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
       p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -107,8 +105,8 @@ public class TestCellCounter {
       t.put(p);
       String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1" };
       runCount(args);
-      FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
-          "part-r-00000");
+      FileInputStream inputStream =
+        new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
       String data = IOUtils.toString(inputStream);
       inputStream.close();
       assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
@@ -118,8 +116,7 @@ public class TestCellCounter {
       assertTrue(data.contains("a;q" + "\t" + "1"));
       assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
       assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
-    }finally{
-      t.close();
+    } finally {
       FileUtil.fullyDelete(new File(OUTPUT_DIR));
     }
   }
@@ -131,8 +128,7 @@ public class TestCellCounter {
   public void testCellCounterPrefix() throws Exception {
     final TableName sourceTable = TableName.valueOf(name.getMethodName());
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(sourceTable, families);
-    try {
+    try (Table t = UTIL.createTable(sourceTable, families)) {
       Put p = new Put(ROW1);
       p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
       p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -146,7 +142,7 @@ public class TestCellCounter {
       String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "\\x01row1" };
       runCount(args);
       FileInputStream inputStream =
-          new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
+        new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
       String data = IOUtils.toString(inputStream);
       inputStream.close();
       assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
@@ -157,7 +153,6 @@ public class TestCellCounter {
       assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
       assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
     } finally {
-      t.close();
       FileUtil.fullyDelete(new File(OUTPUT_DIR));
     }
   }
@@ -169,8 +164,7 @@ public class TestCellCounter {
   public void testCellCounterStartTimeRange() throws Exception {
     final TableName sourceTable = TableName.valueOf(name.getMethodName());
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(sourceTable, families);
-    try{
+    try (Table t = UTIL.createTable(sourceTable, families)) {
       Put p = new Put(ROW1);
       p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
       p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -181,13 +175,11 @@ public class TestCellCounter {
       p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
       p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
       t.put(p);
-      String[] args = {
-          sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(),  ";", "^row1",
-          "--starttime=" + now,
-          "--endtime=" + now + 2 };
+      String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
+        "--starttime=" + now, "--endtime=" + now + 2 };
       runCount(args);
-      FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
-          "part-r-00000");
+      FileInputStream inputStream =
+        new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
       String data = IOUtils.toString(inputStream);
       inputStream.close();
       assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
@@ -197,8 +189,7 @@ public class TestCellCounter {
       assertTrue(data.contains("a;q" + "\t" + "1"));
       assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
       assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
-    }finally{
-      t.close();
+    } finally {
       FileUtil.fullyDelete(new File(OUTPUT_DIR));
     }
   }
@@ -210,8 +201,7 @@ public class TestCellCounter {
   public void testCellCounteEndTimeRange() throws Exception {
     final TableName sourceTable = TableName.valueOf(name.getMethodName());
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(sourceTable, families);
-    try{
+    try (Table t = UTIL.createTable(sourceTable, families)) {
       Put p = new Put(ROW1);
       p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
       p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -222,12 +212,11 @@ public class TestCellCounter {
       p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
       p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
       t.put(p);
-      String[] args = {
-          sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(),  ";", "^row1",
-          "--endtime=" + now + 1 };
+      String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
+        "--endtime=" + now + 1 };
       runCount(args);
-      FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
-          "part-r-00000");
+      FileInputStream inputStream =
+        new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
       String data = IOUtils.toString(inputStream);
       inputStream.close();
       assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
@@ -237,8 +226,7 @@ public class TestCellCounter {
       assertTrue(data.contains("a;q" + "\t" + "1"));
       assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
       assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
-    }finally{
-      t.close();
+    } finally {
       FileUtil.fullyDelete(new File(OUTPUT_DIR));
     }
   }
@@ -250,8 +238,7 @@ public class TestCellCounter {
   public void testCellCounteOutOfTimeRange() throws Exception {
     final TableName sourceTable = TableName.valueOf(name.getMethodName());
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(sourceTable, families);
-    try{
+    try (Table t = UTIL.createTable(sourceTable, families)) {
       Put p = new Put(ROW1);
       p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
       p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
@@ -262,24 +249,22 @@ public class TestCellCounter {
       p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
       p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
       t.put(p);
-      String[] args = {
-      sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(),  ";", "--starttime=" + now + 1,
+      String[] args =
+        { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "--starttime=" + now + 1,
           "--endtime=" + now + 2 };
 
       runCount(args);
-      FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
-          "part-r-00000");
+      FileInputStream inputStream =
+        new FileInputStream(OUTPUT_DIR + File.separator + "part-r-00000");
       String data = IOUtils.toString(inputStream);
-    inputStream.close();
+      inputStream.close();
       // nothing should have been emitted to the reducer
       assertTrue(data.isEmpty());
-    }finally{
-      t.close();
+    } finally {
       FileUtil.fullyDelete(new File(OUTPUT_DIR));
     }
   }
 
-
   private boolean runCount(String[] args) throws Exception {
    // need to make a copy of the configuration to make sure
     // different temp dirs are used.
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
index 167bccd..ffcc900 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java
@@ -52,7 +52,6 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
  */
 @Category(LargeTests.class)
 public class TestHashTable {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestHashTable.class);
@@ -109,11 +108,12 @@ public class TestHashTable {
 
     long batchSize = 300;
     int code = hashTable.run(new String[] {
-        "--batchsize=" + batchSize,
-        "--numhashfiles=" + numHashFiles,
-        "--scanbatch=2",
-        tableName.getNameAsString(),
-        testDir.toString()});
+      "--batchsize=" + batchSize,
+      "--numhashfiles=" + numHashFiles,
+      "--scanbatch=2",
+      tableName.getNameAsString(),
+      testDir.toString()
+    });
     assertEquals("test job failed", 0, code);
 
     FileSystem fs = TEST_UTIL.getTestFileSystem();
@@ -159,7 +159,7 @@ public class TestHashTable {
       MapFile.Reader reader = new MapFile.Reader(hashPath, fs.getConf());
       ImmutableBytesWritable key = new ImmutableBytesWritable();
       ImmutableBytesWritable hash = new ImmutableBytesWritable();
-      while(reader.next(key, hash)) {
+      while (reader.next(key, hash)) {
         String keyString = Bytes.toHex(key.get(), key.getOffset(), key.getLength());
         LOG.debug("Key: " + (keyString.isEmpty() ? "-1" : Integer.parseInt(keyString, 16))
             + " Hash: " + Bytes.toHex(hash.get(), hash.getOffset(), hash.getLength()));
@@ -194,6 +194,4 @@ public class TestHashTable {
     TEST_UTIL.deleteTable(tableName);
     TEST_UTIL.cleanupDataTestDirOnTestFS();
   }
-
-
 }
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java
index 86c23a0..a0d1cf7 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java
@@ -46,7 +46,6 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
  */
 @Category({MapReduceTests.class, SmallTests.class})
 public class TestImportTsvParser {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestImportTsvParser.class);
@@ -165,7 +164,7 @@ public class TestImportTsvParser {
 
     byte[] line = Bytes.toBytes("rowkey\t1234\tval_a");
     ParsedLine parsed = parser.parse(line, line.length);
-    assertEquals(1234l, parsed.getTimestamp(-1));
+    assertEquals(1234L, parsed.getTimestamp(-1));
     checkParsing(parsed, Splitter.on("\t").split(Bytes.toString(line)));
   }
 
@@ -230,9 +229,9 @@ public class TestImportTsvParser {
       line = Bytes.toBytes("\t\tval_a\t1234");
       parser.parseRowKey(line, line.length);
       fail("Should get BadTsvLineException on empty rowkey.");
-    } catch (BadTsvLineException b) {
-
+    } catch (BadTsvLineException ignored) {
     }
+
     parser = new TsvParser("col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t");
     assertEquals(1, parser.getRowKeyColumnIndex());
     line = Bytes.toBytes("val_a\trowkey\t1234");
@@ -243,9 +242,9 @@ public class TestImportTsvParser {
       line = Bytes.toBytes("val_a");
       rowKeyOffsets = parser.parseRowKey(line, line.length);
       fail("Should get BadTsvLineException when number of columns less than rowkey position.");
-    } catch (BadTsvLineException b) {
-
+    } catch (BadTsvLineException ignored) {
     }
+
     parser = new TsvParser("col_a,HBASE_TS_KEY,HBASE_ROW_KEY", "\t");
     assertEquals(2, parser.getRowKeyColumnIndex());
     line = Bytes.toBytes("val_a\t1234\trowkey");
@@ -262,15 +261,15 @@ public class TestImportTsvParser {
     ParsedLine parse = parser.parse(line, line.length);
     assertEquals(18, parse.getAttributeKeyOffset());
     assertEquals(3, parser.getAttributesKeyColumnIndex());
-    String attributes[] = parse.getIndividualAttributes();
-    assertEquals(attributes[0], "key=>value");
+    String[] attributes = parse.getIndividualAttributes();
+    assertEquals("key=>value", attributes[0]);
     try {
       line = Bytes.toBytes("rowkey\tval_a\t1234");
       parser.parse(line, line.length);
       fail("Should get BadTsvLineException on empty rowkey.");
-    } catch (BadTsvLineException b) {
-
+    } catch (BadTsvLineException ignored) {
     }
+
     parser = new TsvParser("HBASE_ATTRIBUTES_KEY,col_a,HBASE_ROW_KEY,HBASE_TS_KEY", "\t");
     assertEquals(2, parser.getRowKeyColumnIndex());
     line = Bytes.toBytes("key=>value\tval_a\trowkey\t1234");
@@ -278,14 +277,14 @@ public class TestImportTsvParser {
     assertEquals(0, parse.getAttributeKeyOffset());
     assertEquals(0, parser.getAttributesKeyColumnIndex());
     attributes = parse.getIndividualAttributes();
-    assertEquals(attributes[0], "key=>value");
+    assertEquals("key=>value", attributes[0]);
     try {
       line = Bytes.toBytes("val_a");
       ParsedLine parse2 = parser.parse(line, line.length);
       fail("Should get BadTsvLineException when number of columns less than rowkey position.");
-    } catch (BadTsvLineException b) {
-
+    } catch (BadTsvLineException ignored) {
     }
+
     parser = new TsvParser("col_a,HBASE_ATTRIBUTES_KEY,HBASE_TS_KEY,HBASE_ROW_KEY", "\t");
     assertEquals(3, parser.getRowKeyColumnIndex());
     line = Bytes.toBytes("val_a\tkey0=>value0,key1=>value1,key2=>value2\t1234\trowkey");
@@ -294,8 +293,8 @@ public class TestImportTsvParser {
     assertEquals(6, parse.getAttributeKeyOffset());
     String[] attr = parse.getIndividualAttributes();
     int i = 0;
-    for(String str :  attr) {
-      assertEquals(("key"+i+"=>"+"value"+i), str );
+    for (String str :  attr) {
+      assertEquals(("key" + i + "=>" + "value" + i), str);
       i++;
     }
   }
@@ -310,9 +309,8 @@ public class TestImportTsvParser {
     ParsedLine parse = parser.parse(line, line.length);
     assertEquals(18, parse.getAttributeKeyOffset());
     assertEquals(3, parser.getAttributesKeyColumnIndex());
-    String attributes[] = parse.getIndividualAttributes();
-    assertEquals(attributes[0], "key=>value");
+    String[] attributes = parse.getIndividualAttributes();
+    assertEquals("key=>value", attributes[0]);
     assertEquals(29, parse.getCellVisibilityColumnOffset());
   }
-
 }
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java
index ad02039..4ff8892 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.mapreduce;
 
 import static org.junit.Assert.assertEquals;
 
-import java.io.IOException;
 import java.util.Arrays;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -55,7 +54,6 @@ import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
  */
 @Category(LargeTests.class)
 public class TestSyncTable {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestSyncTable.class);
@@ -230,10 +228,8 @@ public class TestSyncTable {
     targetTable.close();
   }
 
-  private void assertTargetDoDeletesFalse(int expectedRows, TableName
-      sourceTableName,
-      TableName targetTableName)
-      throws Exception {
+  private void assertTargetDoDeletesFalse(int expectedRows, TableName sourceTableName,
+      TableName targetTableName) throws Exception {
     Table sourceTable = TEST_UTIL.getConnection().getTable(sourceTableName);
     Table targetTable = TEST_UTIL.getConnection().getTable(targetTableName);
 
@@ -242,7 +238,7 @@ public class TestSyncTable {
     Result targetRow = targetScanner.next();
     Result sourceRow = sourceScanner.next();
     int rowsCount = 0;
-    while (targetRow!=null) {
+    while (targetRow != null) {
       rowsCount++;
       //only compares values for existing rows, skipping rows existing on
       //target only that were not deleted given --doDeletes=false
@@ -292,7 +288,7 @@ public class TestSyncTable {
           if (!CellUtil.matchingQualifier(sourceCell, targetCell)) {
             Assert.fail("Qualifiers don't match");
           }
-          if(targetRowKey < 80 && targetRowKey >= 90){
+          if (targetRowKey < 80 && targetRowKey >= 90){
             if (!CellUtil.matchingTimestamp(sourceCell, targetCell)) {
               Assert.fail("Timestamps don't match");
             }
@@ -317,10 +313,8 @@ public class TestSyncTable {
     targetTable.close();
   }
 
-  private void assertTargetDoPutsFalse(int expectedRows, TableName
-      sourceTableName,
-      TableName targetTableName)
-      throws Exception {
+  private void assertTargetDoPutsFalse(int expectedRows, TableName sourceTableName,
+      TableName targetTableName) throws Exception {
     Table sourceTable = TEST_UTIL.getConnection().getTable(sourceTableName);
     Table targetTable = TEST_UTIL.getConnection().getTable(targetTableName);
 
@@ -432,18 +426,18 @@ public class TestSyncTable {
     return syncTable.counters;
   }
 
-  private void hashSourceTable(TableName sourceTableName, Path testDir)
-      throws Exception, IOException {
+  private void hashSourceTable(TableName sourceTableName, Path testDir) throws Exception {
     int numHashFiles = 3;
     long batchSize = 100;  // should be 2 batches per region
     int scanBatch = 1;
     HashTable hashTable = new HashTable(TEST_UTIL.getConfiguration());
     int code = hashTable.run(new String[] {
-        "--batchsize=" + batchSize,
-        "--numhashfiles=" + numHashFiles,
-        "--scanbatch=" + scanBatch,
-        sourceTableName.getNameAsString(),
-        testDir.toString()});
+      "--batchsize=" + batchSize,
+      "--numhashfiles=" + numHashFiles,
+      "--scanbatch=" + scanBatch,
+      sourceTableName.getNameAsString(),
+      testDir.toString()
+    });
     assertEquals("hash table job failed", 0, code);
 
     FileSystem fs = TEST_UTIL.getTestFileSystem();
@@ -570,6 +564,4 @@ public class TestSyncTable {
     sourceTable.close();
     targetTable.close();
   }
-
-
 }
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java
index 9be1634..aa0d7d1 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java
@@ -36,7 +36,6 @@ import org.junit.rules.TestName;
 
 @Category({MapReduceTests.class, SmallTests.class})
 public class TestTableSplit {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestTableSplit.class);
@@ -52,12 +51,12 @@ public class TestTableSplit {
     TableSplit split2 = new TableSplit(TableName.valueOf(name.getMethodName()),
         "row-start".getBytes(),
         "row-end".getBytes(), "location");
-    assertEquals (split1, split2);
-    assertTrue   (split1.hashCode() == split2.hashCode());
+    assertEquals(split1, split2);
+    assertTrue(split1.hashCode() == split2.hashCode());
     HashSet<TableSplit> set = new HashSet<>(2);
     set.add(split1);
     set.add(split2);
-    assertTrue(set.size() == 1);
+    assertEquals(1, set.size());
   }
 
   /**
@@ -72,12 +71,12 @@ public class TestTableSplit {
             "row-start".getBytes(),
             "row-end".getBytes(), "location", 1982);
 
-    assertEquals (split1, split2);
-    assertTrue   (split1.hashCode() == split2.hashCode());
+    assertEquals(split1, split2);
+    assertTrue(split1.hashCode() == split2.hashCode());
     HashSet<TableSplit> set = new HashSet<>(2);
     set.add(split1);
     set.add(split2);
-    assertTrue(set.size() == 1);
+    assertEquals(1, set.size());
   }
 
   /**
@@ -117,14 +116,14 @@ public class TestTableSplit {
             + "encoded region name: encoded-region-name)";
     Assert.assertEquals(str, split.toString());
 
-    split = new TableSplit((TableName) null, null, null, null);
+    split = new TableSplit(null, null, null, null);
     str =
         "HBase table split(table name: null, scan: , start row: null, "
             + "end row: null, region location: null, "
             + "encoded region name: )";
     Assert.assertEquals(str, split.toString());
 
-    split = new TableSplit((TableName) null, null, null, null, null, null, 1000L);
+    split = new TableSplit(null, null, null, null, null, null, 1000L);
     str =
         "HBase table split(table name: null, scan: , start row: null, "
             + "end row: null, region location: null, "
@@ -132,4 +131,3 @@ public class TestTableSplit {
     Assert.assertEquals(str, split.toString());
   }
 }
-
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapper.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapper.java
index aea5036..8925615 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapper.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapper.java
@@ -17,15 +17,15 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.client.Put;
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.KeyValue;
-
-import java.io.IOException;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
 
 /**
  * Dummy mapper used for unit tests to verify that the mapper can be injected.
@@ -33,7 +33,6 @@ import java.io.IOException;
  * reading the input data before writing it to HFiles.
  */
 public class TsvImporterCustomTestMapper extends TsvImporterMapper {
-
   @Override
   protected void setup(Context context) {
     doSetup(context);
diff --git a/hbase-mapreduce/src/test/resources/org/apache/hadoop/hbase/PerformanceEvaluation_Counter.properties b/hbase-mapreduce/src/test/resources/org/apache/hadoop/hbase/PerformanceEvaluation_Counter.properties
index 802eb89..1d99808 100644
--- a/hbase-mapreduce/src/test/resources/org/apache/hadoop/hbase/PerformanceEvaluation_Counter.properties
+++ b/hbase-mapreduce/src/test/resources/org/apache/hadoop/hbase/PerformanceEvaluation_Counter.properties
@@ -20,9 +20,9 @@
 
 CounterGroupName=              HBase Performance Evaluation
 ELAPSED_TIME.name=             Elapsed time in milliseconds
-ROWS.name=                                        Row count
+ROWS.name=                                    Row count
 # ResourceBundle properties file for Map-Reduce counters
 
 CounterGroupName=              HBase Performance Evaluation
 ELAPSED_TIME.name=             Elapsed time in milliseconds
-ROWS.name=                                        Row count
+ROWS.name=                                    Row count

