hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From apurt...@apache.org
Subject [2/2] hbase git commit: HBASE-19195 error-prone fixes for client, mr, and server
Date Thu, 09 Nov 2017 19:59:22 GMT
HBASE-19195 error-prone fixes for client, mr, and server

Signed-off-by: Andrew Purtell <apurtell@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/110f2db7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/110f2db7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/110f2db7

Branch: refs/heads/branch-1.4
Commit: 110f2db756e50d30ea82ceb141b63de3e10d75a0
Parents: 51680b9
Author: Mike Drob <mdrob@apache.org>
Authored: Mon Nov 6 21:16:48 2017 -0600
Committer: Andrew Purtell <apurtell@apache.org>
Committed: Thu Nov 9 11:37:57 2017 -0800

----------------------------------------------------------------------
 .../hadoop/hbase/filter/FilterListWithAND.java  | 96 ++++++++++----------
 .../hbase/mapreduce/TableInputFormatBase.java   |  5 +-
 .../hadoop/hbase/PerformanceEvaluation.java     | 10 +-
 .../hbase/client/TestFromClientSide3.java       | 29 +++---
 .../hadoop/hbase/mapred/TestSplitTable.java     |  8 +-
 .../hadoop/hbase/mapreduce/TestImportTsv.java   |  3 +-
 .../TsvImporterCustomTestMapperForOprAttr.java  |  9 +-
 .../hbase/regionserver/TestMajorCompaction.java |  1 +
 8 files changed, 90 insertions(+), 71 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/110f2db7/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
index ca2c149..19e4d3a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
@@ -90,57 +90,59 @@ public class FilterListWithAND extends FilterListBase {
    *         code of current sub-filter.
    */
   private ReturnCode mergeReturnCode(ReturnCode rc, ReturnCode localRC) {
-    if (rc == ReturnCode.SEEK_NEXT_USING_HINT || localRC == ReturnCode.SEEK_NEXT_USING_HINT) {
+    if (rc == ReturnCode.SEEK_NEXT_USING_HINT) {
       return ReturnCode.SEEK_NEXT_USING_HINT;
     }
     switch (localRC) {
-    case INCLUDE:
-      return rc;
-    case INCLUDE_AND_NEXT_COL:
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL)) {
-        return ReturnCode.INCLUDE_AND_NEXT_COL;
-      }
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
-        return ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW;
-      }
-      if (isInReturnCodes(rc, ReturnCode.SKIP, ReturnCode.NEXT_COL)) {
-        return ReturnCode.NEXT_COL;
-      }
-      if (isInReturnCodes(rc, ReturnCode.NEXT_ROW)) {
-        return ReturnCode.NEXT_ROW;
-      }
-      break;
-    case INCLUDE_AND_SEEK_NEXT_ROW:
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL,
-        ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
-        return ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW;
-      }
-      if (isInReturnCodes(rc, ReturnCode.SKIP, ReturnCode.NEXT_COL, ReturnCode.NEXT_ROW)) {
+      case SEEK_NEXT_USING_HINT:
+        return ReturnCode.SEEK_NEXT_USING_HINT;
+      case INCLUDE:
+        return rc;
+      case INCLUDE_AND_NEXT_COL:
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL)) {
+          return ReturnCode.INCLUDE_AND_NEXT_COL;
+        }
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
+          return ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW;
+        }
+        if (isInReturnCodes(rc, ReturnCode.SKIP, ReturnCode.NEXT_COL)) {
+          return ReturnCode.NEXT_COL;
+        }
+        if (isInReturnCodes(rc, ReturnCode.NEXT_ROW)) {
+          return ReturnCode.NEXT_ROW;
+        }
+        break;
+      case INCLUDE_AND_SEEK_NEXT_ROW:
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL,
+          ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW)) {
+          return ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW;
+        }
+        if (isInReturnCodes(rc, ReturnCode.SKIP, ReturnCode.NEXT_COL, ReturnCode.NEXT_ROW)) {
+          return ReturnCode.NEXT_ROW;
+        }
+        break;
+      case SKIP:
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.SKIP)) {
+          return ReturnCode.SKIP;
+        }
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_NEXT_COL, ReturnCode.NEXT_COL)) {
+          return ReturnCode.NEXT_COL;
+        }
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW, ReturnCode.NEXT_ROW)) {
+          return ReturnCode.NEXT_ROW;
+        }
+        break;
+      case NEXT_COL:
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL, ReturnCode.SKIP,
+          ReturnCode.NEXT_COL)) {
+          return ReturnCode.NEXT_COL;
+        }
+        if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW, ReturnCode.NEXT_ROW)) {
+          return ReturnCode.NEXT_ROW;
+        }
+        break;
+      case NEXT_ROW:
         return ReturnCode.NEXT_ROW;
-      }
-      break;
-    case SKIP:
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.SKIP)) {
-        return ReturnCode.SKIP;
-      }
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_NEXT_COL, ReturnCode.NEXT_COL)) {
-        return ReturnCode.NEXT_COL;
-      }
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW, ReturnCode.NEXT_ROW)) {
-        return ReturnCode.NEXT_ROW;
-      }
-      break;
-    case NEXT_COL:
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE, ReturnCode.INCLUDE_AND_NEXT_COL, ReturnCode.SKIP,
-        ReturnCode.NEXT_COL)) {
-        return ReturnCode.NEXT_COL;
-      }
-      if (isInReturnCodes(rc, ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW, ReturnCode.NEXT_ROW)) {
-        return ReturnCode.NEXT_ROW;
-      }
-      break;
-    case NEXT_ROW:
-      return ReturnCode.NEXT_ROW;
     }
     throw new IllegalStateException(
         "Received code is not valid. rc: " + rc + ", localRC: " + localRC);

http://git-wip-us.apache.org/repos/asf/hbase/blob/110f2db7/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index 8d12b43..f4c2f9b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -286,8 +286,9 @@ extends InputFormat<ImmutableBytesWritable, Result> {
       }
 
       //The default value of "hbase.mapreduce.input.autobalance" is false.
-      if (context.getConfiguration().getBoolean(MAPREDUCE_INPUT_AUTOBALANCE, false) != false) {
-        long maxAveRegionSize = context.getConfiguration().getInt(MAX_AVERAGE_REGION_SIZE, 8*1073741824);
+      if (context.getConfiguration().getBoolean(MAPREDUCE_INPUT_AUTOBALANCE, false)) {
+        long maxAveRegionSize = context.getConfiguration()
+            .getLong(MAX_AVERAGE_REGION_SIZE, 8L*1073741824); //8GB
         return calculateAutoBalancedSplits(splits, maxAveRegionSize);
       }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/110f2db7/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index fb6701f..71bc9ad 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -1003,9 +1003,13 @@ public class PerformanceEvaluation extends Configured implements Tool {
     }
 
     int getValueLength(final Random r) {
-      if (this.opts.isValueRandom()) return Math.abs(r.nextInt() % opts.valueSize);
-      else if (this.opts.isValueZipf()) return Math.abs(this.zipf.nextInt());
-      else return opts.valueSize;
+      if (this.opts.isValueRandom()) {
+        return r.nextInt(opts.valueSize);
+      } else if (this.opts.isValueZipf()) {
+        return Math.abs(this.zipf.nextInt());
+      } else {
+        return opts.valueSize;
+      }
     }
 
     void updateValueSize(final Result [] rs) throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/110f2db7/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
index 8199525..385d4ad 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
@@ -79,13 +79,13 @@ public class TestFromClientSide3 {
   private static byte[] FAMILY = Bytes.toBytes("testFamily");
   private static Random random = new Random();
   private static int SLAVES = 3;
-  private static byte [] ROW = Bytes.toBytes("testRow");
+  private static final byte [] ROW = Bytes.toBytes("testRow");
   private static final byte[] ANOTHERROW = Bytes.toBytes("anotherrow");
-  private static byte [] QUALIFIER = Bytes.toBytes("testQualifier");
-  private static byte [] VALUE = Bytes.toBytes("testValue");
-  private final static byte[] COL_QUAL = Bytes.toBytes("f1");
-  private final static byte[] VAL_BYTES = Bytes.toBytes("v1");
-  private final static byte[] ROW_BYTES = Bytes.toBytes("r1");
+  private static final byte [] QUALIFIER = Bytes.toBytes("testQualifier");
+  private static final byte [] VALUE = Bytes.toBytes("testValue");
+  private static final byte[] COL_QUAL = Bytes.toBytes("f1");
+  private static final byte[] VAL_BYTES = Bytes.toBytes("v1");
+  private static final byte[] ROW_BYTES = Bytes.toBytes("r1");
 
   /**
    * @throws java.lang.Exception
@@ -365,7 +365,7 @@ public class TestFromClientSide3 {
           break;
         }
       } catch (Exception e) {
-        LOG.debug("Waiting for region to come online: " + regionName);
+        LOG.debug("Waiting for region to come online: " + Bytes.toString(regionName));
       }
       Thread.sleep(40);
     }
@@ -481,6 +481,7 @@ public class TestFromClientSide3 {
     assertEquals(exist, true);
   }
 
+  @Test
   public void testHTableExistsMethodSingleRegionMultipleGets() throws Exception {
 
     HTable table = TEST_UTIL.createTable(
@@ -492,13 +493,11 @@ public class TestFromClientSide3 {
 
     List<Get> gets = new ArrayList<Get>();
     gets.add(new Get(ROW));
-    gets.add(null);
     gets.add(new Get(ANOTHERROW));
 
     Boolean[] results = table.exists(gets);
-    assertEquals(results[0], true);
-    assertEquals(results[1], false);
-    assertEquals(results[2], false);
+    assertTrue(results[0]);
+    assertFalse(results[1]);
   }
 
   @Test
@@ -691,6 +690,7 @@ public class TestFromClientSide3 {
       cpService.execute(new Runnable() {
         @Override
         public void run() {
+          boolean threw;
           Put put1 = new Put(row);
           Put put2 = new Put(rowLocked);
           put1.addColumn(FAMILY, QUALIFIER, value1);
@@ -712,8 +712,13 @@ public class TestFromClientSide3 {
                   return rpcCallback.get();
                 }
               });
-            fail("This cp should fail because the target lock is blocked by previous put");
+            threw = false;
           } catch (Throwable ex) {
+            threw = true;
+          }
+          if (!threw) {
+            // Can't call fail() earlier because the catch would eat it.
+            fail("This cp should fail because the target lock is blocked by previous put");
           }
         }
       });

http://git-wip-us.apache.org/repos/asf/hbase/blob/110f2db7/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java
index 4a37f88..6106987 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestSplitTable.java
@@ -33,7 +33,7 @@ import org.junit.experimental.categories.Category;
 public class TestSplitTable {
 
   @Test
-  @SuppressWarnings("deprecation")
+  @SuppressWarnings({"deprecation", "SelfComparison"})
   public void testSplitTableCompareTo() {
     TableSplit aTableSplit = new TableSplit(Bytes.toBytes("tableA"),
         Bytes.toBytes("aaa"), Bytes.toBytes("ddd"), "locationA");
@@ -44,9 +44,9 @@ public class TestSplitTable {
     TableSplit cTableSplit = new TableSplit(Bytes.toBytes("tableA"),
         Bytes.toBytes("lll"), Bytes.toBytes("zzz"), "locationA");
 
-    assertTrue(aTableSplit.compareTo(aTableSplit) == 0);
-    assertTrue(bTableSplit.compareTo(bTableSplit) == 0);
-    assertTrue(cTableSplit.compareTo(cTableSplit) == 0);
+    assertEquals(0, aTableSplit.compareTo(aTableSplit));
+    assertEquals(0, bTableSplit.compareTo(bTableSplit));
+    assertEquals(0, cTableSplit.compareTo(cTableSplit));
 
     assertTrue(aTableSplit.compareTo(bTableSplit) < 0);
     assertTrue(bTableSplit.compareTo(aTableSplit) > 0);

http://git-wip-us.apache.org/repos/asf/hbase/blob/110f2db7/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
index 83de8f9..743d859 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -436,7 +437,7 @@ public class TestImportTsv implements Configurable {
 
     // run the import
     Tool tool = new ImportTsv();
-    LOG.debug("Running ImportTsv with arguments: " + argsArray);
+    LOG.debug("Running ImportTsv with arguments: " + Arrays.toString(argsArray));
     assertEquals(0, ToolRunner.run(conf, tool, argsArray));
 
     // Perform basic validation. If the input args did not include

http://git-wip-us.apache.org/repos/asf/hbase/blob/110f2db7/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapperForOprAttr.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapperForOprAttr.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapperForOprAttr.java
index 9d8b8f0..4050862 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapperForOprAttr.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapperForOprAttr.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
+import java.util.Arrays;
 
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Put;
@@ -44,10 +45,10 @@ public class TsvImporterCustomTestMapperForOprAttr extends TsvImporterMapper
{
       for (String attr : attributes) {
         String[] split = attr.split(ImportTsv.DEFAULT_ATTRIBUTES_SEPERATOR);
         if (split == null || split.length <= 1) {
-          throw new BadTsvLineException("Invalid attributes seperator specified" + attributes);
+          throw new BadTsvLineException(msg(attributes));
         } else {
           if (split[0].length() <= 0 || split[1].length() <= 0) {
-            throw new BadTsvLineException("Invalid attributes seperator specified" + attributes);
+            throw new BadTsvLineException(msg(attributes));
           }
           put.setAttribute(split[0], Bytes.toBytes(split[1]));
         }
@@ -55,4 +56,8 @@ public class TsvImporterCustomTestMapperForOprAttr extends TsvImporterMapper {
     }
     put.add(kv);
   }
+
+  private String msg(Object[] attributes) {
+    return "Invalid attributes separator specified: " + Arrays.toString(attributes);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/110f2db7/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
index 3edf7c7..e838b4c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
@@ -443,6 +443,7 @@ public class TestMajorCompaction {
    * basically works.
    * @throws IOException
    */
+  @Test
   public void testMajorCompactingToNoOutputWithReverseScan() throws IOException {
     createStoreFile(r);
     for (int i = 0; i < compactionThreshold; i++) {


Mime
View raw message