hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From md...@apache.org
Subject hbase git commit: HBASE-19240 more error-prone results
Date Mon, 13 Nov 2017 01:48:03 GMT
Repository: hbase
Updated Branches:
  refs/heads/branch-2 bc8048cf6 -> cd681f26b


HBASE-19240 more error-prone results


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cd681f26
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cd681f26
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cd681f26

Branch: refs/heads/branch-2
Commit: cd681f26bc27c2f5d6386060d456e233568044b4
Parents: bc8048c
Author: Mike Drob <mdrob@apache.org>
Authored: Fri Nov 10 16:32:25 2017 -0600
Committer: Mike Drob <mdrob@apache.org>
Committed: Sun Nov 12 19:47:38 2017 -0600

----------------------------------------------------------------------
 .../hbase/client/TestRpcControllerFactory.java  |   8 +-
 .../hbase/ipc/IntegrationTestRpcClient.java     |  10 +-
 .../hadoop/hbase/rest/model/VersionModel.java   |   5 +-
 .../hbase/rest/HBaseRESTTestingUtility.java     |   3 +-
 .../hadoop/hbase/rest/TestVersionResource.java  |   8 +-
 .../hbase/rest/model/TestVersionModel.java      |   5 +
 .../org/apache/hadoop/hbase/TestZooKeeper.java  |   5 +-
 .../hadoop/hbase/client/TestFromClientSide.java |   8 +-
 .../coprocessor/TestCoprocessorInterface.java   |   4 +-
 ...erverForAddingMutationsFromCoprocessors.java |   2 +-
 .../apache/hadoop/hbase/filter/TestFilter.java  |   6 +-
 .../TestMasterOperationsForRegionReplicas.java  |   3 +-
 .../hbase/master/TestRegionPlacement.java       |   3 +-
 .../normalizer/TestSimpleRegionNormalizer.java  |   6 +-
 ...TestMasterProcedureSchedulerConcurrency.java |   2 +
 .../procedure/TestWALProcedureStoreOnHDFS.java  | 149 +++++++++----------
 .../hadoop/hbase/mob/TestCachedMobFile.java     |   1 +
 .../TestEndToEndSplitTransaction.java           |   4 +-
 .../hbase/regionserver/TestHRegionInfo.java     |   1 +
 .../hbase/regionserver/TestKeepDeletes.java     |   1 +
 .../regionserver/TestMemStoreChunkPool.java     |   4 +-
 .../regionserver/TestRegionServerMetrics.java   |   4 +-
 .../regionserver/TestServerNonceManager.java    |   2 +-
 .../TestStoreFileRefresherChore.java            |  33 ++--
 .../replication/TestReplicationEndpoint.java    |   2 +-
 .../security/access/TestTablePermissions.java   |   4 +-
 .../hbase/util/MultiThreadedUpdaterWithACL.java |   8 +-
 27 files changed, 150 insertions(+), 141 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
index 9007f65..2f9fe2a 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
@@ -22,14 +22,10 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-
 import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.curator.shaded.com.google.common.collect.ConcurrentHashMultiset;
-import org.apache.curator.shaded.com.google.common.collect.Multiset;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.CellScanner;
@@ -41,9 +37,13 @@ import org.apache.hadoop.hbase.coprocessor.ProtobufCoprocessorService;
 import org.apache.hadoop.hbase.ipc.DelegatingHBaseRpcController;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ConcurrentHashMultiset;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multiset;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
+
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Rule;

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
index f955610..27a2d85 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
@@ -198,24 +198,22 @@ public class IntegrationTestRpcClient {
     @Override
     public void run() {
       while (running.get()) {
-        switch (random.nextInt() % 2) {
-        case 0: //start a server
+        if (random.nextBoolean()) {
+          //start a server
           try {
             cluster.startServer();
           } catch (Exception e) {
             LOG.warn(e);
             exception.compareAndSet(null, e);
           }
-          break;
-
-        case 1: // stop a server
+        } else {
+          // stop a server
           try {
             cluster.stopRandomServer();
           } catch (Exception e) {
             LOG.warn(e);
             exception.compareAndSet(null, e);
           }
-        default:
         }
 
         Threads.sleep(100);

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
index 13a0cee..378d149 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
@@ -75,8 +75,9 @@ public class VersionModel implements Serializable, ProtobufMessageHandler
{
       System.getProperty("os.version") + ' ' +
       System.getProperty("os.arch");
     serverVersion = context.getServerInfo();
-    jerseyVersion = ServletContainer.class.getClass().getPackage()
-      .getImplementationVersion();
+    jerseyVersion = ServletContainer.class.getPackage().getImplementationVersion();
+    // Currently, this will always be null because the manifest doesn't have any useful information
+    if (jerseyVersion == null) jerseyVersion = "";
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
index 4c977fd..4cce21b 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
@@ -38,6 +38,7 @@ import org.glassfish.jersey.server.ResourceConfig;
 import org.glassfish.jersey.servlet.ServletContainer;
 
 import javax.servlet.DispatcherType;
+import java.util.Arrays;
 import java.util.EnumSet;
 
 public class HBaseRESTTestingUtility {
@@ -87,7 +88,7 @@ public class HBaseRESTTestingUtility {
       filter = filter.trim();
       ctxHandler.addFilter(filter, "/*", EnumSet.of(DispatcherType.REQUEST));
     }
-    LOG.info("Loaded filter classes :" + filterClasses);
+    LOG.info("Loaded filter classes :" + Arrays.toString(filterClasses));
 
     conf.set(RESTServer.REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY, ".*");
     RESTServer.addCSRFFilter(ctxHandler, conf);

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
index e76422b..1f927f5 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
@@ -93,8 +93,8 @@ public class TestVersionResource {
     assertNotNull(model.getServerVersion());
     String jerseyVersion = model.getJerseyVersion();
     assertNotNull(jerseyVersion);
-    assertEquals(jerseyVersion, ServletContainer.class.getClass().getPackage()
-      .getImplementationVersion());
+    // TODO: fix when we actually get a jersey version
+    // assertEquals(jerseyVersion, ServletContainer.class.getPackage().getImplementationVersion());
   }
 
   @Test
@@ -111,8 +111,8 @@ public class TestVersionResource {
     assertTrue(body.contains(System.getProperty("os.name")));
     assertTrue(body.contains(System.getProperty("os.version")));
     assertTrue(body.contains(System.getProperty("os.arch")));
-    assertTrue(body.contains(ServletContainer.class.getClass().getPackage()
-      .getImplementationVersion()));
+    // TODO: fix when we actually get a jersey version
+    // assertTrue(body.contains(ServletContainer.class.getPackage().getImplementationVersion()));
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java
index e8da529..081f7a0 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java
@@ -21,7 +21,12 @@ package org.apache.hadoop.hbase.rest.model;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 
+import org.junit.Assume;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
+
+import javax.servlet.ServletContext;
 
 @Category({RestTests.class, SmallTests.class})
 public class TestVersionModel extends TestModelBase<VersionModel> {

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
index 4ed8d91..f75c7a4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
@@ -69,6 +69,7 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -143,8 +144,8 @@ public class TestZooKeeper {
    * @throws IOException
    * @throws InterruptedException
    */
-  // fails frequently, disabled for now, see HBASE-6406
-  //@Test
+  @Ignore("fails frequently, disabled for now, see HBASE-6406")
+  @Test
   public void testClientSessionExpired() throws Exception {
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 02d3797..31d34d7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -4688,8 +4688,8 @@ public class TestFromClientSide {
       NavigableMap<Long, byte[]> navigableMap = result.getMap().get(FAMILY)
           .get(QUALIFIER);
 
-      assertEquals("The number of versions of '" + FAMILY + ":" + QUALIFIER
-          + " did not match " + versions, versions, navigableMap.size());
+      assertEquals("The number of versions of '" + Bytes.toString(FAMILY) + ":"
+          + Bytes.toString(QUALIFIER) + " did not match", versions, navigableMap.size());
       for (Map.Entry<Long, byte[]> entry : navigableMap.entrySet()) {
         assertTrue("The value at time " + entry.getKey()
             + " did not match what was put",
@@ -4724,8 +4724,8 @@ public class TestFromClientSide {
       NavigableMap<Long, byte[]> navigableMap = result.getMap().get(FAMILY)
           .get(QUALIFIER);
 
-      assertEquals("The number of versions of '" + FAMILY + ":" + QUALIFIER + " did not match " +
-        versions + "; " + put.toString() + ", " + get.toString(), versions, navigableMap.size());
+      assertEquals("The number of versions of '" + Bytes.toString(FAMILY) + ":"
+          + Bytes.toString(QUALIFIER) + " did not match", versions, navigableMap.size());
       for (Map.Entry<Long, byte[]> entry : navigableMap.entrySet()) {
         assertTrue("The value at time " + entry.getKey()
             + " did not match what was put",

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
index 4e3a652..61b4808 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
@@ -272,9 +272,7 @@ public class TestCoprocessorInterface {
         @Override
         public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e,
             final Get get, final List<Cell> results) throws IOException {
-          if (1/0 == 1) {
-            e.complete();
-          }
+          throw new RuntimeException();
         }
       });
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java
index 8897957..3e1621c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java
@@ -212,7 +212,7 @@ public class TestRegionObserverForAddingMutationsFromCoprocessors {
           new Put(row2).addColumn(test, dummy, cells.get(0).getTimestamp(), dummy),
           new Put(row3).addColumn(test, dummy, cells.get(0).getTimestamp(), dummy),
       };
-      LOG.info("Putting:" + puts);
+      LOG.info("Putting:" + Arrays.toString(puts));
       miniBatchOp.addOperationsFromCP(0, puts);
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index 637720a..b4d1935 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -56,6 +56,7 @@ import org.apache.hadoop.hbase.wal.WAL;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -622,7 +623,7 @@ public class TestFilter {
    * @throws Exception
    */
   @Test
-  public void tes94FilterRowCompatibility() throws Exception {
+  public void test94FilterRowCompatibility() throws Exception {
     Scan s = new Scan();
     OldTestFilter filter = new OldTestFilter();
     s.setFilter(filter);
@@ -2051,7 +2052,8 @@ public class TestFilter {
     }
   }
 
-  // TODO: intentionally disabled?
+  @Test
+  @Ignore("TODO: intentionally disabled?")
   public void testNestedFilterListWithSCVF() throws IOException {
     byte[] columnStatus = Bytes.toBytes("S");
     HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index b73c873..89f7824 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -55,6 +55,7 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -239,7 +240,7 @@ public class TestMasterOperationsForRegionReplicas {
     }
   }
 
-  //@Test (TODO: enable when we have support for alter_table- HBASE-10361).
+  @Test @Ignore("Enable when we have support for alter_table- HBASE-10361")
   public void testIncompleteMetaTableReplicaInformation() throws Exception {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     final int numRegions = 3;

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
index 5dc55f2..0c9e33e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java
@@ -421,7 +421,8 @@ public class TestRegionPlacement {
       for (Region region: rs.getRegions(TableName.valueOf("testRegionAssignment"))) {
         InetSocketAddress[] favoredSocketAddress = rs.getFavoredNodesForRegion(
             region.getRegionInfo().getEncodedName());
-        List<ServerName> favoredServerList = plan.getAssignmentMap().get(region.getRegionInfo());
+        String regionName = region.getRegionInfo().getRegionNameAsString();
+        List<ServerName> favoredServerList = plan.getAssignmentMap().get(regionName);
 
         // All regions are supposed to have favored nodes,
         // except for hbase:meta and ROOT

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
index 2af4b47..0936c16 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
@@ -343,6 +343,7 @@ public class TestSimpleRegionNormalizer {
     assertEquals(hri4, ((SplitNormalizationPlan) plan).getRegionInfo());
   }
 
+  @SuppressWarnings("MockitoCast")
   protected void setupMocksForNormalizer(Map<byte[], Integer> regionSizes,
                                          List<RegionInfo> RegionInfo) {
     masterServices = Mockito.mock(MasterServices.class, RETURNS_DEEP_STUBS);
@@ -360,7 +361,10 @@ public class TestSimpleRegionNormalizer {
       when(regionLoad.getName()).thenReturn(region.getKey());
       when(regionLoad.getStorefileSizeMB()).thenReturn(region.getValue());
 
-      when(masterServices.getServerManager().getLoad(sn).
+      // this is possibly broken with jdk9, unclear if false positive or not
+      // suppress it for now, fix it when we get to running tests on 9
+      // see: http://errorprone.info/bugpattern/MockitoCast
+      when((Object) masterServices.getServerManager().getLoad(sn).
         getRegionsLoad().get(region.getKey())).thenReturn(regionLoad);
     }
     try {

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java
index 6d88502..af48f64 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java
@@ -207,6 +207,8 @@ public class TestMasterProcedureSchedulerConcurrency {
         case READ:
           queue.wakeTableSharedLock(proc, getTableName(proc));
           break;
+        default:
+          throw new UnsupportedOperationException();
       }
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
index 7932d00..157d08b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDFS.java
@@ -35,13 +35,14 @@ import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
 
 @Category({MasterTests.class, LargeTests.class})
 public class TestWALProcedureStoreOnHDFS {
@@ -62,7 +63,10 @@ public class TestWALProcedureStoreOnHDFS {
     }
   };
 
-  private static void initConfig(Configuration conf) {
+  @Before
+  public void initConfig() {
+    Configuration conf = UTIL.getConfiguration();
+
     conf.setInt("dfs.replication", 3);
     conf.setInt("dfs.namenode.replication.min", 3);
 
@@ -72,7 +76,8 @@ public class TestWALProcedureStoreOnHDFS {
     conf.setInt(WALProcedureStore.MAX_SYNC_FAILURE_ROLL_CONF_KEY, 10);
   }
 
-  public void setup() throws Exception {
+  // No @Before because some tests need to do additional config first
+  private void setupDFS() throws Exception {
     MiniDFSCluster dfs = UTIL.startMiniDFSCluster(3);
 
     Path logDir = new Path(new Path(dfs.getFileSystem().getUri()), "/test-logs");
@@ -82,6 +87,7 @@ public class TestWALProcedureStoreOnHDFS {
     store.recoverLease();
   }
 
+  @After
   public void tearDown() throws Exception {
     store.stop(false);
     UTIL.getDFSCluster().getFileSystem().delete(store.getWALDir(), true);
@@ -95,102 +101,85 @@ public class TestWALProcedureStoreOnHDFS {
 
   @Test(timeout=60000, expected=RuntimeException.class)
   public void testWalAbortOnLowReplication() throws Exception {
-    initConfig(UTIL.getConfiguration());
-    setup();
-    try {
-      assertEquals(3, UTIL.getDFSCluster().getDataNodes().size());
+    setupDFS();
 
-      LOG.info("Stop DataNode");
-      UTIL.getDFSCluster().stopDataNode(0);
-      assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
+    assertEquals(3, UTIL.getDFSCluster().getDataNodes().size());
 
-      store.insert(new TestProcedure(1, -1), null);
-      for (long i = 2; store.isRunning(); ++i) {
-        assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
-        store.insert(new TestProcedure(i, -1), null);
-        Thread.sleep(100);
-      }
-      assertFalse(store.isRunning());
-      fail("The store.insert() should throw an exeption");
-    } finally {
-      tearDown();
+    LOG.info("Stop DataNode");
+    UTIL.getDFSCluster().stopDataNode(0);
+    assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
+
+    store.insert(new TestProcedure(1, -1), null);
+    for (long i = 2; store.isRunning(); ++i) {
+      assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
+      store.insert(new TestProcedure(i, -1), null);
+      Thread.sleep(100);
     }
+    assertFalse(store.isRunning());
   }
 
   @Test(timeout=60000)
   public void testWalAbortOnLowReplicationWithQueuedWriters() throws Exception {
-    initConfig(UTIL.getConfiguration());
-    setup();
-    try {
-      assertEquals(3, UTIL.getDFSCluster().getDataNodes().size());
-      store.registerListener(new ProcedureStore.ProcedureStoreListener() {
-        @Override
-        public void postSync() {
-          Threads.sleepWithoutInterrupt(2000);
+    setupDFS();
+
+    assertEquals(3, UTIL.getDFSCluster().getDataNodes().size());
+    store.registerListener(new ProcedureStore.ProcedureStoreListener() {
+      @Override
+      public void postSync() { Threads.sleepWithoutInterrupt(2000); }
+
+      @Override
+      public void abortProcess() {}
+    });
+
+    final AtomicInteger reCount = new AtomicInteger(0);
+    Thread[] thread = new Thread[store.getNumThreads() * 2 + 1];
+    for (int i = 0; i < thread.length; ++i) {
+      final long procId = i + 1;
+      thread[i] = new Thread(() -> {
+        try {
+          LOG.debug("[S] INSERT " + procId);
+          store.insert(new TestProcedure(procId, -1), null);
+          LOG.debug("[E] INSERT " + procId);
+        } catch (RuntimeException e) {
+          reCount.incrementAndGet();
+          LOG.debug("[F] INSERT " + procId + ": " + e.getMessage());
         }
-
-        @Override
-        public void abortProcess() {}
       });
+      thread[i].start();
+    }
 
-      final AtomicInteger reCount = new AtomicInteger(0);
-      Thread[] thread = new Thread[store.getNumThreads() * 2 + 1];
-      for (int i = 0; i < thread.length; ++i) {
-        final long procId = i + 1;
-        thread[i] = new Thread() {
-          public void run() {
-            try {
-              LOG.debug("[S] INSERT " + procId);
-              store.insert(new TestProcedure(procId, -1), null);
-              LOG.debug("[E] INSERT " + procId);
-            } catch (RuntimeException e) {
-              reCount.incrementAndGet();
-              LOG.debug("[F] INSERT " + procId + ": " + e.getMessage());
-            }
-          }
-        };
-        thread[i].start();
-      }
-
-      Thread.sleep(1000);
-      LOG.info("Stop DataNode");
-      UTIL.getDFSCluster().stopDataNode(0);
-      assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
-
-      for (int i = 0; i < thread.length; ++i) {
-        thread[i].join();
-      }
+    Thread.sleep(1000);
+    LOG.info("Stop DataNode");
+    UTIL.getDFSCluster().stopDataNode(0);
+    assertEquals(2, UTIL.getDFSCluster().getDataNodes().size());
 
-      assertFalse(store.isRunning());
-      assertTrue(reCount.toString(), reCount.get() >= store.getNumThreads() &&
-                                     reCount.get() < thread.length);
-    } finally {
-      tearDown();
+    for (int i = 0; i < thread.length; ++i) {
+      thread[i].join();
     }
+
+    assertFalse(store.isRunning());
+    assertTrue(reCount.toString(), reCount.get() >= store.getNumThreads() &&
+                                   reCount.get() < thread.length);
   }
 
   @Test(timeout=60000)
   public void testWalRollOnLowReplication() throws Exception {
-    initConfig(UTIL.getConfiguration());
     UTIL.getConfiguration().setInt("dfs.namenode.replication.min", 1);
-    setup();
-    try {
-      int dnCount = 0;
-      store.insert(new TestProcedure(1, -1), null);
-      UTIL.getDFSCluster().restartDataNode(dnCount);
-      for (long i = 2; i < 100; ++i) {
-        store.insert(new TestProcedure(i, -1), null);
-        waitForNumReplicas(3);
-        Thread.sleep(100);
-        if ((i % 30) == 0) {
-          LOG.info("Restart Data Node");
-          UTIL.getDFSCluster().restartDataNode(++dnCount % 3);
-        }
+    setupDFS();
+
+    int dnCount = 0;
+    store.insert(new TestProcedure(1, -1), null);
+    UTIL.getDFSCluster().restartDataNode(dnCount);
+    for (long i = 2; i < 100; ++i) {
+      store.insert(new TestProcedure(i, -1), null);
+      waitForNumReplicas(3);
+      Thread.sleep(100);
+      if ((i % 30) == 0) {
+        LOG.info("Restart Data Node");
+        UTIL.getDFSCluster().restartDataNode(++dnCount % 3);
       }
-      assertTrue(store.isRunning());
-    } finally {
-      tearDown();
     }
+    assertTrue(store.isRunning());
   }
 
   public void waitForNumReplicas(int numReplicas) throws Exception {

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java
index 276fedb..61f1cce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestCachedMobFile.java
@@ -72,6 +72,7 @@ public class TestCachedMobFile extends TestCase{
     Assert.assertEquals(EXPECTED_REFERENCE_ZERO, cachedMobFile.getReferenceCount());
   }
 
+  @SuppressWarnings("SelfComparison")
   @Test
   public void testCompare() throws Exception {
     String caseName = getName();

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
index 38f3060..fa4d7f0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
@@ -366,8 +366,8 @@ public class TestEndToEndSplitTransaction {
       }
      if (daughterA == null || daughterB == null) {
        throw new IOException("Failed to get daughters, daughterA=" + daughterA + ", daughterB=" +
-          daughterB + ", timeout=" + timeout + ", result=" + result + ", regionName=" + regionName +
-          ", region=" + region);
+          daughterB + ", timeout=" + timeout + ", result=" + result + ", regionName=" +
+          Bytes.toString(regionName) + ", region=" + region);
      }
 
       //if we are here, this means the region split is complete or timed out

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
index 0257cc0..53ef976 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
@@ -162,6 +162,7 @@ public class TestHRegionInfo {
     assertTrue(HRegionInfo.FIRST_META_REGIONINFO.isMetaRegion());
   }
 
+  @SuppressWarnings("SelfComparison")
   @Test
   public void testComparator() {
     final TableName tableName = TableName.valueOf(name.getMethodName());

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
index 5a20882..ee29ef2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
@@ -764,6 +764,7 @@ public class TestKeepDeletes {
   /**
    * Verify scenarios with multiple CFs and columns
    */
+  @Test
   public void testWithMixedCFs() throws Exception {
     HTableDescriptor htd = hbu.createTableDescriptor(name.getMethodName(), 0, 1,
         HConstants.FOREVER, KeepDeletedCells.TRUE);

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreChunkPool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreChunkPool.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreChunkPool.java
index aedb905..69d7589 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreChunkPool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreChunkPool.java
@@ -27,8 +27,8 @@ import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.After;
 import org.junit.AfterClass;
-import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -69,7 +69,7 @@ public class TestMemStoreChunkPool {
     ChunkCreator.chunkPoolDisabled = chunkPoolDisabledBeforeTest;
   }
 
-  @Before
+  @After
   public void tearDown() throws Exception {
     chunkCreator.clearChunksInPool();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index 80d170a..c23c786 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -237,8 +237,8 @@ public class TestRegionServerMetrics {
     ResultScanner scanner = table.getScanner(scan);
     for (int i = 0; i < n; i++) {
       Result res = scanner.next();
-      LOG.debug(
-        "Result row: " + Bytes.toString(res.getRow()) + ", value: " + res.getValue(cf, qualifier));
+      LOG.debug("Result row: " + Bytes.toString(res.getRow()) + ", value: " +
+          Bytes.toString(res.getValue(cf, qualifier)));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerNonceManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerNonceManager.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerNonceManager.java
index 89e414d..23296d0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerNonceManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerNonceManager.java
@@ -101,7 +101,7 @@ public class TestServerNonceManager {
     ServerNonceManager nm = createManager();
     try {
       nm.endOperation(NO_NONCE, 1, true);
-      fail("Should have thrown");
+      throw new Error("Should have thrown");
     } catch (AssertionError err) {}
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
index 7919391..416b194 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
@@ -50,7 +51,6 @@ import org.apache.hadoop.hbase.wal.WALFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.StoppableImplementation;
-import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -133,6 +133,19 @@ public class TestStoreFileRefresherChore {
     }
   }
 
+  private void verifyDataExpectFail(Region newReg, int startRow, int numRows, byte[] qf,
+      byte[]... families) throws IOException {
+    boolean threw = false;
+    try {
+      verifyData(newReg, startRow, numRows, qf, families);
+    } catch (AssertionError e) {
+      threw = true;
+    }
+    if (!threw) {
+      fail("Expected data verification to fail");
+    }
+  }
+
   private void verifyData(Region newReg, int startRow, int numRows, byte[] qf, byte[]... families)
       throws IOException {
     for (int i = startRow; i < startRow + numRows; i++) {
@@ -189,17 +202,12 @@ public class TestStoreFileRefresherChore {
     primary.flush(true);
     verifyData(primary, 0, 100, qf, families);
 
-    try {
-      verifyData(replica1, 0, 100, qf, families);
-      Assert.fail("should have failed");
-    } catch(AssertionError ex) {
-      // expected
-    }
+    verifyDataExpectFail(replica1, 0, 100, qf, families);
     chore.chore();
     verifyData(replica1, 0, 100, qf, families);
 
     // simulate an fs failure where we cannot refresh the store files for the replica
-    ((FailingHRegionFileSystem)((HRegion)replica1).getRegionFileSystem()).fail = true;
+    ((FailingHRegionFileSystem)replica1.getRegionFileSystem()).fail = true;
 
     // write some more data to primary and flush
     putData(primary, 100, 100, qf, families);
@@ -209,18 +217,13 @@ public class TestStoreFileRefresherChore {
     chore.chore(); // should not throw ex, but we cannot refresh the store files
 
     verifyData(replica1, 0, 100, qf, families);
-    try {
-      verifyData(replica1, 100, 100, qf, families);
-      Assert.fail("should have failed");
-    } catch(AssertionError ex) {
-      // expected
-    }
+    verifyDataExpectFail(replica1, 100, 100, qf, families);
 
     chore.isStale = true;
     chore.chore(); //now after this, we cannot read back any value
     try {
       verifyData(replica1, 0, 100, qf, families);
-      Assert.fail("should have failed with IOException");
+      fail("should have failed with IOException");
     } catch(IOException ex) {
       // expected
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
index 6396228..b76ebb1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
@@ -490,7 +490,7 @@ public class TestReplicationEndpoint extends TestReplicationBase {
       }
 
       super.replicate(replicateContext);
-      LOG.info("Replicated " + row + ", count=" + replicateCount.get());
+      LOG.info("Replicated " + Bytes.toString(row) + ", count=" + replicateCount.get());
 
       replicated.set(replicateCount.get() > COUNT); // first 10 times, we return false
       return replicated.get();

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
index 6ca9790..30e8396 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
@@ -189,9 +189,9 @@ public class TestTablePermissions {
     permission = userPerms.get(0);
     assertEquals("Permission should be for " + TEST_TABLE,
         TEST_TABLE, permission.getTableName());
-    assertTrue("Permission should be for family " + TEST_FAMILY,
+    assertTrue("Permission should be for family " + Bytes.toString(TEST_FAMILY),
         Bytes.equals(TEST_FAMILY, permission.getFamily()));
-    assertTrue("Permission should be for qualifier " + TEST_QUALIFIER,
+    assertTrue("Permission should be for qualifier " + Bytes.toString(TEST_QUALIFIER),
         Bytes.equals(TEST_QUALIFIER, permission.getQualifier()));
 
     // check actions

http://git-wip-us.apache.org/repos/asf/hbase/blob/cd681f26/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
index 9d9bb63..68537a4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
@@ -125,8 +125,8 @@ public class MultiThreadedUpdaterWithACL extends MultiThreadedUpdater {
               res = localTable.get(get);
             }
           } catch (IOException ie) {
-            LOG.warn("Failed to get the row for key = [" + get.getRow() + "], column family = ["
-                + Bytes.toString(cf) + "]", ie);
+            LOG.warn("Failed to get the row for key = [" + Bytes.toString(get.getRow()) +
+                "], column family = [" + Bytes.toString(cf) + "]", ie);
           }
           return res;
         }
@@ -151,8 +151,8 @@ public class MultiThreadedUpdaterWithACL extends MultiThreadedUpdater {
           Result result = (Result) user.runAs(action);
           return result;
         } catch (Exception ie) {
-          LOG.warn("Failed to get the row for key = [" + get.getRow() + "], column family = ["
-              + Bytes.toString(cf) + "]", ie);
+          LOG.warn("Failed to get the row for key = [" + Bytes.toString(get.getRow()) +
+              "], column family = [" + Bytes.toString(cf) + "]", ie);
         }
       }
       // This means that no users were present


Mime
View raw message