hbase-commits mailing list archives

From zhang...@apache.org
Subject [30/42] hbase git commit: HBASE-10092 Move up on to log4j2
Date Thu, 21 Dec 2017 13:25:19 GMT
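
Nearly every hunk below applies the same mechanical swap: the
commons-logging Log/LogFactory pair is replaced by SLF4J's
Logger/LoggerFactory. A minimal before/after sketch of the pattern,
using a hypothetical class SomeTool rather than one from the patch:

    // Before: commons-logging, as in the removed imports.
    //   import org.apache.commons.logging.Log;
    //   import org.apache.commons.logging.LogFactory;
    //   private static final Log LOG = LogFactory.getLog(SomeTool.class);

    // After: SLF4J, as added throughout this patch.
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class SomeTool {  // hypothetical class, for illustration only
      private static final Logger LOG = LoggerFactory.getLogger(SomeTool.class);

      public void run() {
        // SLF4J also offers {}-parameterized messages, which skip string
        // concatenation when the log level is disabled.
        LOG.info("running {} with {} workers", "SomeTool", 4);
      }
    }
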
http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
index 3135bd0..d5b41a6 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java
@@ -22,8 +22,6 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 import com.codahale.metrics.Histogram;
 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.chaos.actions.MoveRandomRegionOfTableAction;
 import org.apache.hadoop.hbase.chaos.actions.RestartRandomRsExceptMetaAction;
@@ -40,6 +38,8 @@ import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.*;
 import java.util.concurrent.Callable;
@@ -59,7 +59,7 @@ import static org.junit.Assert.assertTrue;
 @Category(IntegrationTests.class)
 public class IntegrationTestRegionReplicaPerf extends IntegrationTestBase {
 
-  private static final Log LOG = LogFactory.getLog(IntegrationTestRegionReplicaPerf.class);
+  private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestRegionReplicaPerf.class);
 
   private static final String SLEEP_TIME_KEY = "sleeptime";
   // short default interval because tests don't run very long.

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java
index 47ce9e1..e3d6def 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java
@@ -23,14 +23,14 @@ import java.util.Set;
 import java.util.regex.Pattern;
 
 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.internal.TextListener;
 import org.junit.runner.JUnitCore;
 import org.junit.runner.Result;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class drives the Integration test suite execution. Executes all
@@ -40,7 +40,7 @@ import org.junit.runner.Result;
 public class IntegrationTestsDriver extends AbstractHBaseTool {
   private static final String SHORT_REGEX_ARG = "r";
   private static final String LONG_REGEX_ARG = "regex";
-  private static final Log LOG = LogFactory.getLog(IntegrationTestsDriver.class);
+  private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestsDriver.class);
   private IntegrationTestFilter intTestFilter = new IntegrationTestFilter();
 
   public static void main(String[] args) throws Exception {

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
index 8f69d33..1cd8147 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
@@ -20,12 +20,14 @@ package org.apache.hadoop.hbase;
 
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import javax.ws.rs.client.Client;
 import javax.ws.rs.client.ClientBuilder;
 import javax.ws.rs.client.Entity;
@@ -98,7 +100,7 @@ public class RESTApiClusterManager extends Configured implements ClusterManager
   // because cluster managers don't tend to implement these operations.
   private ClusterManager hBaseClusterManager;
 
-  private static final Log LOG = LogFactory.getLog(RESTApiClusterManager.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RESTApiClusterManager.class);
 
   RESTApiClusterManager() {
     hBaseClusterManager = ReflectionUtils.newInstance(HBaseClusterManager.class,

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java
index 9e95b01..2ecc810 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java
@@ -24,8 +24,6 @@ import java.util.Set;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
@@ -42,6 +40,8 @@ import org.apache.hadoop.hbase.util.RegionSplitter;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
 import org.apache.hadoop.hbase.util.LoadTestKVGenerator;
 import org.junit.Assert;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 /**
@@ -49,7 +49,9 @@ import org.junit.Assert;
  */
 @InterfaceAudience.Private
 public class StripeCompactionsPerformanceEvaluation extends AbstractHBaseTool {
-  private static final Log LOG = LogFactory.getLog(StripeCompactionsPerformanceEvaluation.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(StripeCompactionsPerformanceEvaluation.class);
+
   private static final TableName TABLE_NAME =
     TableName.valueOf(StripeCompactionsPerformanceEvaluation.class.getSimpleName());
   private static final byte[] COLUMN_FAMILY = Bytes.toBytes("CF");
@@ -195,8 +197,8 @@ public class StripeCompactionsPerformanceEvaluation extends AbstractHBaseTool {
 
   private void runOneTest(String description, Configuration conf) throws Exception {
     int numServers = util.getHBaseClusterInterface().getClusterStatus().getServersSize();
-    long startKey = (long)preloadKeys * numServers;
-    long endKey = startKey + (long)writeKeys * numServers;
+    long startKey = preloadKeys * numServers;
+    long endKey = startKey + writeKeys * numServers;
     status(String.format("%s test starting on %d servers; preloading 0 to %d and writing to %d",
         description, numServers, startKey, endKey));
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java
index 3994aae..7a89569 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/Action.java
@@ -27,8 +27,6 @@ import java.util.function.BiConsumer;
 import java.util.function.Consumer;
 
 import org.apache.commons.lang3.RandomUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.HBaseCluster;
@@ -45,6 +43,8 @@ import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A (possibly mischievous) action that the ChaosMonkey can perform.
@@ -66,7 +66,7 @@ public class Action {
   public static final String START_DATANODE_TIMEOUT_KEY =
     "hbase.chaosmonkey.action.startdatanodetimeout";
 
-  protected static final Log LOG = LogFactory.getLog(Action.class);
+  protected static final Logger LOG = LoggerFactory.getLogger(Action.class);
 
   protected static final long KILL_MASTER_TIMEOUT_DEFAULT = PolicyBasedChaosMonkey.TIMEOUT;
   protected static final long START_MASTER_TIMEOUT_DEFAULT = PolicyBasedChaosMonkey.TIMEOUT;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java
index 800a445..a566527 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.java
@@ -25,10 +25,10 @@ import java.util.List;
 import java.util.Queue;
 
 import org.apache.commons.lang3.RandomUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Restarts a ratio of the regionservers in a rolling fashion. At each step, either kills a
@@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
  * limits the maximum number of servers that can be down at the same time during rolling restarts.
  */
 public class RollingBatchRestartRsAction extends BatchRestartRsAction {
-  private static final Log LOG = LogFactory.getLog(RollingBatchRestartRsAction.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RollingBatchRestartRsAction.class);
   protected int maxDeadServers; // number of maximum dead servers at any given time. Defaults to 5
 
   public RollingBatchRestartRsAction(long sleepTime, float ratio) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java
index c53542c..2ce5b68 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java
@@ -22,20 +22,20 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Base class of the factory that will create a ChaosMonkey.
  */
 public abstract class MonkeyFactory {
-  private static final Log LOG = LogFactory.getLog(MonkeyFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MonkeyFactory.class);
 
   protected TableName tableName;
   protected Set<String> columnFamilies;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
index 70452bb..a49f541 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
@@ -23,18 +23,18 @@ import java.util.Collection;
 import java.util.List;
 
 import org.apache.commons.lang3.RandomUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.chaos.policies.Policy;
 import org.apache.hadoop.hbase.util.Pair;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Chaos monkey that given multiple policies will run actions against the cluster.
  */
 public class PolicyBasedChaosMonkey extends ChaosMonkey {
 
-  private static final Log LOG = LogFactory.getLog(PolicyBasedChaosMonkey.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PolicyBasedChaosMonkey.class);
   private static final long ONE_SEC = 1000;
   private static final long FIVE_SEC = 5 * ONE_SEC;
   private static final long ONE_MIN = 60 * ONE_SEC;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java
index 6b365f8..81267a6 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java
@@ -18,18 +18,18 @@
 
 package org.apache.hadoop.hbase.chaos.policies;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.chaos.actions.Action;
 import org.apache.hadoop.hbase.util.StoppableImplementation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A policy to introduce chaos to the cluster
  */
 public abstract class Policy extends StoppableImplementation implements Runnable {
 
-  protected static final Log LOG = LogFactory.getLog(Policy.class);
+  protected static final Logger LOG = LoggerFactory.getLogger(Policy.class);
 
   protected PolicyContext context;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java
index d72111f..8385c15 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java
@@ -23,8 +23,6 @@ import java.util.Set;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
@@ -34,11 +32,12 @@ import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
 import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.util.ToolRunner;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 
 public class ChaosMonkeyRunner extends AbstractHBaseTool {
-  private static final Log LOG = LogFactory.getLog(ChaosMonkeyRunner.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ChaosMonkeyRunner.class);
 
   public static final String MONKEY_LONG_OPT = "monkey";
   public static final String CHAOS_MONKEY_PROPS = "monkeyProps";
@@ -75,7 +74,7 @@ public class ChaosMonkeyRunner extends AbstractHBaseTool {
           monkeyProps.load(this.getClass().getClassLoader()
               .getResourceAsStream(chaosMonkeyPropsFile));
         } catch (IOException e) {
-          LOG.warn(e);
+          LOG.warn(e.toString(), e);
           System.exit(EXIT_FAILURE);
         }
       }
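
The LOG.warn(e) call sites above change shape because commons-logging's
warn(Object) accepted a bare Throwable, while SLF4J has no
warn(Throwable) overload. A short sketch of the replacement idiom the
patch uses, with a hypothetical class WarnExample:

    import java.io.IOException;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class WarnExample {  // hypothetical class, for illustration only
      private static final Logger LOG = LoggerFactory.getLogger(WarnExample.class);

      void load() {
        try {
          throw new IOException("boom");  // stand-in for the real failure
        } catch (IOException e) {
          // Message plus throwable: keeps the stack trace, which a plain
          // LOG.warn(e.toString()) alone would silently drop.
          LOG.warn(e.toString(), e);
        }
      }
    }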

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java
index 1ce4356..cb29427 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/Monkeys.java
@@ -24,11 +24,12 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
@@ -36,7 +37,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
  * This class can be used to control chaos monkeys life cycle.
  */
 public class Monkeys implements Closeable {
-  private static final Log LOG = LogFactory.getLog(Monkeys.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Monkeys.class);
 
   private final Configuration conf;
   private final ChaosMonkeyRunner monkeyRunner;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
index 27a2d85..9754d4e 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
@@ -37,8 +37,6 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.codec.Codec;
@@ -51,13 +49,14 @@ import org.apache.hadoop.hbase.util.Threads;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 
 @Category(IntegrationTests.class)
 public class IntegrationTestRpcClient {
 
-  private static final Log LOG = LogFactory.getLog(IntegrationTestRpcClient.class);
+  private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestRpcClient.class);
 
   private final Configuration conf;
 
@@ -203,7 +202,7 @@ public class IntegrationTestRpcClient {
           try {
             cluster.startServer();
           } catch (Exception e) {
-            LOG.warn(e);
+            LOG.warn(e.toString(), e);
             exception.compareAndSet(null, e);
           }
         } else {
@@ -211,7 +210,7 @@ public class IntegrationTestRpcClient {
           try {
             cluster.stopRandomServer();
           } catch (Exception e) {
-            LOG.warn(e);
+            LOG.warn(e.toString(), e);
             exception.compareAndSet(null, e);
           }
         }
@@ -261,7 +260,7 @@ public class IntegrationTestRpcClient {
           BlockingInterface stub = newBlockingStub(rpcClient, server.getListenerAddress());
           ret = stub.echo(null, param);
         } catch (Exception e) {
-          LOG.warn(e);
+          LOG.warn(e.toString(), e);
           continue; // expected in case connection is closing or closed
         }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
index 3fa1054..2588e63 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
@@ -34,8 +34,6 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
@@ -88,7 +86,8 @@ import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 
@@ -128,7 +127,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 @Category(IntegrationTests.class)
 public class IntegrationTestBulkLoad extends IntegrationTestBase {
 
-  private static final Log LOG = LogFactory.getLog(IntegrationTestBulkLoad.class);
+  private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestBulkLoad.class);
 
   private static final byte[] CHAIN_FAM = Bytes.toBytes("L");
   private static final byte[] SORT_FAM  = Bytes.toBytes("S");
@@ -197,7 +196,7 @@ public class IntegrationTestBulkLoad extends IntegrationTestBase {
             Thread.sleep(sleepTime.get());
           }
         } catch (InterruptedException e1) {
-          LOG.error(e1);
+          LOG.error(e1.toString(), e1);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
index dfc54e0..ab5f2bb 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
@@ -30,8 +30,6 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -58,6 +56,8 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Validate ImportTsv + LoadIncrementalHFiles on a distributed cluster.
@@ -66,7 +66,7 @@ import org.junit.rules.TestName;
 public class IntegrationTestImportTsv extends Configured implements Tool {
 
   private static final String NAME = IntegrationTestImportTsv.class.getSimpleName();
-  private static final Log LOG = LogFactory.getLog(IntegrationTestImportTsv.class);
+  private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestImportTsv.class);
 
   protected static final String simple_tsv =
       "row1\t1\tc1\tc2\n" +

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
index 2df1c4b..065cec9 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestTableSnapshotInputFormat.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.mapreduce;
 
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -34,6 +32,8 @@ import org.apache.hadoop.util.ToolRunner;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * An integration test to test {@link TableSnapshotInputFormat} which enables
@@ -68,8 +68,8 @@ import org.junit.experimental.categories.Category;
 @Category(IntegrationTests.class)
 // Not runnable as a unit test. See TestTableSnapshotInputFormat
 public class IntegrationTestTableSnapshotInputFormat extends IntegrationTestBase {
-
-  private static final Log LOG = LogFactory.getLog(IntegrationTestTableSnapshotInputFormat.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(IntegrationTestTableSnapshotInputFormat.class);
 
   private static final String TABLE_NAME_KEY = "IntegrationTestTableSnapshotInputFormat.table";
   private static final String DEFAULT_TABLE_NAME = "IntegrationTestTableSnapshotInputFormat";

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
index e45baf1..bb31ece 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
@@ -30,8 +30,6 @@ import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
@@ -74,6 +72,8 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Integration test that should benchmark how fast HBase can recover from failures. This test starts
@@ -121,7 +121,7 @@ public class IntegrationTestMTTR {
    * Constants.
    */
   private static final byte[] FAMILY = Bytes.toBytes("d");
-  private static final Log LOG = LogFactory.getLog(IntegrationTestMTTR.class);
+  private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestMTTR.class);
   private static long sleepTime;
   private static final String SLEEP_TIME_KEY = "hbase.IntegrationTestMTTR.sleeptime";
   private static final long SLEEP_TIME_DEFAULT = 60 * 1000l;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
index b10e54a..f5f2ff9 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
@@ -19,8 +19,6 @@
  */
 package org.apache.hadoop.hbase.rsgroup;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.Waiter;
@@ -29,6 +27,8 @@ import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Runs all of the units tests defined in TestGroupBase as an integration test.
@@ -36,7 +36,7 @@ import org.junit.experimental.categories.Category;
  */
 @Category(IntegrationTests.class)
 public class IntegrationTestRSGroup extends TestRSGroupsBase {
-  private final static Log LOG = LogFactory.getLog(IntegrationTestRSGroup.class);
+  private final static Logger LOG = LoggerFactory.getLogger(IntegrationTestRSGroup.class);
   private static boolean initialized = false;
 
   @Before

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index b9cc69d..826db07 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -42,8 +42,6 @@ import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -119,7 +117,8 @@ import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 
 /**
@@ -253,7 +252,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
    */
   static class Generator extends Configured implements Tool {
 
-    private static final Log LOG = LogFactory.getLog(Generator.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Generator.class);
 
     /**
      * Set this configuration if you want to test single-column family flush works. If set, we will
@@ -854,7 +853,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
    * WALs and oldWALs dirs (Some of this is TODO).
    */
   static class Search extends Configured implements Tool {
-    private static final Log LOG = LogFactory.getLog(Search.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Search.class);
     protected Job job;
 
     private static void printUsage(final String error) {
@@ -914,7 +913,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
             try {
               LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey());
             } catch (IOException|InterruptedException e) {
-              LOG.warn(e);
+              LOG.warn(e.toString(), e);
             }
             if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) {
               context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1);
@@ -1016,7 +1015,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
    */
   static class Verify extends Configured implements Tool {
 
-    private static final Log LOG = LogFactory.getLog(Verify.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Verify.class);
     protected static final BytesWritable DEF = new BytesWritable(new byte[] { 0 });
     protected static final BytesWritable DEF_LOST_FAMILIES = new BytesWritable(new byte[] { 1 });
 
@@ -1455,7 +1454,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
    */
   static class Loop extends Configured implements Tool {
 
-    private static final Log LOG = LogFactory.getLog(Loop.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Loop.class);
     private static final String USAGE = "Usage: Loop <num iterations> <num mappers> " +
         "<num nodes per mapper> <output dir> <num reducers> [<width> <wrap multiplier>" +
         " <num walker threads>] \n" +

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
index cdee14d..d0e6e52 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
@@ -24,8 +24,6 @@ import java.util.Iterator;
 import java.util.UUID;
 
 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
@@ -50,6 +48,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.log.HBaseMarkers;
 import org.apache.hadoop.hbase.mapreduce.Import;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.security.User;
@@ -72,6 +71,8 @@ import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * IT test used to verify the deletes with visibility labels.
@@ -123,7 +124,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
   private static String userName = "user1";
 
   static class VisibilityGenerator extends Generator {
-    private static final Log LOG = LogFactory.getLog(VisibilityGenerator.class);
+    private static final Logger LOG = LoggerFactory.getLogger(VisibilityGenerator.class);
 
     @Override
     protected void createSchema() throws IOException {
@@ -162,7 +163,8 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
             AccessControlClient.grant(ConnectionFactory.createConnection(getConf()), tableName,
                 USER.getShortName(), null, null, actions);
           } catch (Throwable e) {
-            LOG.fatal("Error in granting permission for the user " + USER.getShortName(), e);
+            LOG.error(HBaseMarkers.FATAL, "Error in granting permission for the user " +
+                USER.getShortName(), e);
             throw new IOException(e);
           }
         }
@@ -239,7 +241,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
   }
 
   static class Copier extends Configured implements Tool {
-    private static final Log LOG = LogFactory.getLog(Copier.class);
+    private static final Logger LOG = LoggerFactory.getLogger(Copier.class);
     private TableName tableName;
     private int labelIndex;
     private boolean delete;
@@ -395,7 +397,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
   }
 
   static class VisibilityVerify extends Verify {
-    private static final Log LOG = LogFactory.getLog(VisibilityVerify.class);
+    private static final Logger LOG = LoggerFactory.getLogger(VisibilityVerify.class);
     private TableName tableName;
     private int labelIndex;
 
@@ -475,7 +477,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
 
   static class VisibilityLoop extends Loop {
     private static final int SLEEP_IN_MS = 5000;
-    private static final Log LOG = LogFactory.getLog(VisibilityLoop.class);
+    private static final Logger LOG = LoggerFactory.getLogger(VisibilityLoop.class);
     IntegrationTestBigLinkedListWithVisibility it;
 
     @Override
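
SLF4J defines no FATAL level, so the hunk above rewrites LOG.fatal(...)
as LOG.error(...) tagged with the HBaseMarkers.FATAL marker imported
earlier in the same file. A sketch of the marker mechanism in plain
SLF4J, assuming HBaseMarkers.FATAL is an ordinary named marker;
FatalExample is hypothetical:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.Marker;
    import org.slf4j.MarkerFactory;

    public class FatalExample {  // hypothetical class, for illustration only
      // Assumed equivalent of HBaseMarkers.FATAL: a named SLF4J marker.
      private static final Marker FATAL = MarkerFactory.getMarker("FATAL");
      private static final Logger LOG = LoggerFactory.getLogger(FatalExample.class);

      void grantPermissions() {
        try {
          // ... the operation that may fail ...
        } catch (RuntimeException e) {
          // error(Marker, String, Throwable): the backing framework
          // (log4j2 here) can filter or highlight marked events.
          LOG.error(FATAL, "Error in granting permission", e);
        }
      }
    }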

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
index ce86fc2..f47ef50 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
@@ -27,8 +27,6 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -84,7 +82,8 @@ import java.util.TreeSet;
 import java.util.concurrent.atomic.AtomicInteger;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 
 /**
@@ -107,7 +106,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 @Category(IntegrationTests.class)
 public class IntegrationTestLoadAndVerify  extends IntegrationTestBase  {
 
-  private static final Log LOG = LogFactory.getLog(IntegrationTestLoadAndVerify.class);
+  private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestLoadAndVerify.class);
 
   private static final String TEST_NAME = "IntegrationTestLoadAndVerify";
   private static final byte[] TEST_FAMILY = Bytes.toBytes("f1");
@@ -426,7 +425,7 @@ public void cleanUpCluster() throws Exception {
           try {
             LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey());
           } catch (IOException|InterruptedException e) {
-            LOG.warn(e);
+            LOG.warn(e.toString(), e);
           }
           if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) {
             context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1);

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
index 34af01b..333232e 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.test;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
@@ -38,6 +36,8 @@ import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -165,7 +165,7 @@ public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
    * {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList.Loop}
    */
   protected class VerifyReplicationLoop extends Configured implements Tool {
-    private final Log LOG = LogFactory.getLog(VerifyReplicationLoop.class);
+    private final Logger LOG = LoggerFactory.getLogger(VerifyReplicationLoop.class);
     protected ClusterID source;
     protected ClusterID sink;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
index 7337423..52f6566 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
@@ -27,8 +27,6 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.lang3.RandomUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HRegionLocation;
@@ -53,7 +51,8 @@ import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Assert;
 import org.junit.experimental.categories.Category;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 
 /**
@@ -95,7 +94,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 @Category(IntegrationTests.class)
 public class IntegrationTestTimeBoundedRequestsWithRegionReplicas extends IntegrationTestIngest {
 
-  private static final Log LOG = LogFactory.getLog(
+  private static final Logger LOG = LoggerFactory.getLogger(
     IntegrationTestTimeBoundedRequestsWithRegionReplicas.class);
 
   private static final String TEST_NAME

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java
index 09cbda3..6629f89 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestZKAndFSPermissions.java
@@ -26,8 +26,6 @@ import java.io.IOException;
 import java.util.List;
 
 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -51,6 +49,8 @@ import org.apache.zookeeper.data.ACL;
 import org.apache.zookeeper.data.Id;
 import org.apache.zookeeper.data.Stat;
 import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * An integration test which checks that the znodes in zookeeper and data in the FileSystem
@@ -68,8 +68,9 @@ import org.junit.experimental.categories.Category;
  */
 @Category(IntegrationTests.class)
 public class IntegrationTestZKAndFSPermissions extends AbstractHBaseTool {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(IntegrationTestZKAndFSPermissions.class);
 
-  private static final Log LOG = LogFactory.getLog(IntegrationTestZKAndFSPermissions.class);
   private String superUser;
   private String masterPrincipal;
   private boolean isForce;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml
index 85760e8..645c0e1 100644
--- a/hbase-mapreduce/pom.xml
+++ b/hbase-mapreduce/pom.xml
@@ -261,8 +261,8 @@
       <artifactId>commons-lang3</artifactId>
     </dependency>
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.zookeeper</groupId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
index 91ef714..b0674bf 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
@@ -19,12 +19,11 @@
 package org.apache.hadoop.hbase.mapred;
 
 import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.RegionLocator;
@@ -44,28 +43,30 @@ import org.apache.hadoop.mapred.Partitioner;
 @InterfaceAudience.Public
 public class HRegionPartitioner<K2,V2>
 implements Partitioner<ImmutableBytesWritable, V2> {
-  private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HRegionPartitioner.class);
   // Connection and locator are not cleaned up; they just die when partitioner is done.
   private Connection connection;
   private RegionLocator locator;
   private byte[][] startKeys;
 
+  @Override
   public void configure(JobConf job) {
     try {
       this.connection = ConnectionFactory.createConnection(HBaseConfiguration.create(job));
       TableName tableName = TableName.valueOf(job.get(TableOutputFormat.OUTPUT_TABLE));
       this.locator = this.connection.getRegionLocator(tableName);
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error(e.toString(), e);
     }
 
     try {
       this.startKeys = this.locator.getStartKeys();
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error(e.toString(), e);
     }
   }
 
+  @Override
   public int getPartition(ImmutableBytesWritable key, V2 value, int numPartitions) {
     byte[] region = null;
     // Only one region return 0
@@ -77,7 +78,7 @@ implements Partitioner<ImmutableBytesWritable, V2> {
       // here if a region splits while mapping
       region = locator.getRegionLocation(key.get()).getRegionInfo().getStartKey();
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error(e.toString(), e);
     }
     for (int i = 0; i < this.startKeys.length; i++){
       if (Bytes.compareTo(region, this.startKeys[i]) == 0 ){

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
index 3460fe7..ba1df4c 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.mapred;
 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.mapred.MapReduceBase;
@@ -38,8 +38,8 @@ public class IdentityTableReduce
 extends MapReduceBase
 implements TableReduce<ImmutableBytesWritable, Put> {
   @SuppressWarnings("unused")
-  private static final Log LOG =
-    LogFactory.getLog(IdentityTableReduce.class.getName());
+  private static final Logger LOG =
+    LoggerFactory.getLogger(IdentityTableReduce.class.getName());
 
   /**
    * No aggregation, output pairs of (key, record)

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
index cbd7236..d9bb66b 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.mapred;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Connection;
@@ -39,7 +39,7 @@ import org.apache.hadoop.util.StringUtils;
 @InterfaceAudience.Public
 public class TableInputFormat extends TableInputFormatBase implements
     JobConfigurable {
-  private static final Log LOG = LogFactory.getLog(TableInputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TableInputFormat.class);
 
   /**
    * space delimited list of columns

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
index 48ee763..509972e 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.mapred;
 import java.io.Closeable;
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Connection;
@@ -79,7 +79,7 @@ import org.apache.hadoop.mapred.Reporter;
 @InterfaceAudience.Public
 public abstract class TableInputFormatBase
 implements InputFormat<ImmutableBytesWritable, Result> {
-  private static final Log LOG = LogFactory.getLog(TableInputFormatBase.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TableInputFormatBase.class);
   private byte [][] inputColumns;
   private Table table;
   private RegionLocator regionLocator;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
index 95be24f..a49d0ec 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
@@ -19,10 +19,9 @@
 package org.apache.hadoop.hbase.mapred;
 
 import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -43,7 +42,7 @@ import static org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl.LOG_PER_RO
  */
 @InterfaceAudience.Public
 public class TableRecordReaderImpl {
-  private static final Log LOG = LogFactory.getLog(TableRecordReaderImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TableRecordReaderImpl.class);
 
   private byte [] startRow;
   private byte [] endRow;
@@ -248,7 +247,7 @@ public class TableRecordReaderImpl {
         long now = System.currentTimeMillis();
         LOG.info("Mapper took " + (now-timestamp)
           + "ms to process " + rowcount + " rows");
-        LOG.info(ioe);
+        LOG.info(ioe.toString(), ioe);
         String lastRow = lastSuccessfulRow == null ?
           "null" : Bytes.toStringBinary(lastSuccessfulRow);
         LOG.info("lastSuccessfulRow=" + lastRow);

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
index 48cc0d5..6d6125f 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
@@ -72,8 +72,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
  */
 @InterfaceAudience.Public
 public class CellCounter extends Configured implements Tool {
-  private static final Log LOG =
-    LogFactory.getLog(CellCounter.class.getName());
+  private static final Logger LOG =
+    LoggerFactory.getLogger(CellCounter.class.getName());
 
 
   /**

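CellCounter retrieves its logger by name rather than by class. Both overloads exist in SLF4J, and getLogger(clazz.getName()) resolves to the same logger as getLogger(clazz), so this call ports over with only the factory renamed. A brief illustration, again with a hypothetical class:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerNaming {
  // These two lookups yield the same underlying logger name.
  private static final Logger BY_CLASS = LoggerFactory.getLogger(LoggerNaming.class);
  private static final Logger BY_NAME = LoggerFactory.getLogger(LoggerNaming.class.getName());
}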
http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
index 81af165..2e9e62c 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
@@ -23,8 +23,6 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -32,6 +30,8 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -49,7 +49,7 @@ import org.apache.hadoop.util.ToolRunner;
  */
 @InterfaceAudience.Public
 public class CopyTable extends Configured implements Tool {
-  private static final Log LOG = LogFactory.getLog(CopyTable.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CopyTable.class);
 
   final static String NAME = "copytable";
   long startTime = 0;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
index 775739f..07f05dd 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
@@ -26,12 +26,12 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Result;
@@ -50,7 +50,8 @@ import org.apache.hadoop.hbase.util.Bytes;
  */
 @InterfaceAudience.Private
 public class DefaultVisibilityExpressionResolver implements VisibilityExpressionResolver {
-  private static final Log LOG = LogFactory.getLog(DefaultVisibilityExpressionResolver.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DefaultVisibilityExpressionResolver.class);
 
   private Configuration conf;
   private final Map<String, Integer> labels = new HashMap<>();

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
index 7107537..34f3398 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
@@ -21,14 +21,15 @@ package org.apache.hadoop.hbase.mapreduce;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -48,7 +49,7 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
  */
 @InterfaceAudience.Private
 public final class ExportUtils {
-  private static final Log LOG = LogFactory.getLog(ExportUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ExportUtils.class);
   public static final String RAW_SCAN = "hbase.mapreduce.include.deleted.rows";
   public static final String EXPORT_BATCHING = "hbase.export.scanner.batch";
   public static final String EXPORT_CACHING = "hbase.export.scanner.caching";

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index ffe1c85..9bd0530 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -40,8 +40,6 @@ import java.util.function.Function;
 import java.util.stream.Collectors;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -91,6 +89,8 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
 /**
@@ -105,7 +105,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
 @InterfaceAudience.Public
 public class HFileOutputFormat2
     extends FileOutputFormat<ImmutableBytesWritable, Cell> {
-  private static final Log LOG = LogFactory.getLog(HFileOutputFormat2.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HFileOutputFormat2.class);
   static class TableInfo {
     private TableDescriptor tableDesctiptor;
     private RegionLocator regionLocator;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
index d8c2314..b48ecf0 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
@@ -19,10 +19,9 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -53,7 +52,7 @@ public class HRegionPartitioner<KEY, VALUE>
 extends Partitioner<ImmutableBytesWritable, VALUE>
 implements Configurable {
 
-  private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HRegionPartitioner.class);
   private Configuration conf = null;
   // Connection and locator are not cleaned up; they just die when partitioner is done.
   private Connection connection;
@@ -86,7 +85,7 @@ implements Configurable {
       // here if a region splits while mapping
       region = this.locator.getRegionLocation(key.get()).getRegionInfo().getStartKey();
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error(e.toString(), e);
     }
     for (int i = 0; i < this.startKeys.length; i++){
       if (Bytes.compareTo(region, this.startKeys[i]) == 0 ){
@@ -129,12 +128,12 @@ implements Configurable {
       TableName tableName = TableName.valueOf(conf.get(TableOutputFormat.OUTPUT_TABLE));
       this.locator = this.connection.getRegionLocator(tableName);
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error(e.toString(), e);
     }
     try {
       this.startKeys = this.locator.getStartKeys();
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error(e.toString(), e);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
index 2c8caf5..e68ac3b 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
@@ -27,8 +27,6 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Properties;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -56,14 +54,15 @@ import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Charsets;
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Ordering;
 
 public class HashTable extends Configured implements Tool {
 
-  private static final Log LOG = LogFactory.getLog(HashTable.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HashTable.class);
 
   private static final int DEFAULT_BATCH_SIZE = 8000;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java
index 76c1f60..876953c 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.io.Writable;
 
@@ -53,7 +53,7 @@ public class IdentityTableReducer
 extends TableReducer<Writable, Mutation, Writable> {
 
   @SuppressWarnings("unused")
-  private static final Log LOG = LogFactory.getLog(IdentityTableReducer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(IdentityTableReducer.class);
 
   /**
    * Writes each given record, consisting of the row key and the given values,

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
index bf0081b..e8e1de0 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
@@ -33,8 +33,6 @@ import java.util.Map;
 import java.util.TreeMap;
 import java.util.UUID;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -79,6 +77,8 @@ import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 /**
@@ -86,7 +86,7 @@ import org.apache.zookeeper.KeeperException;
  */
 @InterfaceAudience.Public
 public class Import extends Configured implements Tool {
-  private static final Log LOG = LogFactory.getLog(Import.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Import.class);
   final static String NAME = "import";
   public final static String CF_RENAME_PROP = "HBASE_IMPORTER_RENAME_CFS";
   public final static String BULK_OUTPUT_CONF_KEY = "import.bulk.output";
@@ -192,7 +192,7 @@ public class Import extends Configured implements Tool {
       extends TableMapper<CellWritableComparable, Cell> {
     private Map<byte[], byte[]> cfRenameMap;
     private Filter filter;
-    private static final Log LOG = LogFactory.getLog(CellImporter.class);
+    private static final Logger LOG = LoggerFactory.getLogger(CellImporter.class);
 
     /**
      * @param row  The current table row key.
@@ -256,7 +256,7 @@ public class Import extends Configured implements Tool {
   public static class CellImporter extends TableMapper<ImmutableBytesWritable, Cell> {
     private Map<byte[], byte[]> cfRenameMap;
     private Filter filter;
-    private static final Log LOG = LogFactory.getLog(CellImporter.class);
+    private static final Logger LOG = LoggerFactory.getLogger(CellImporter.class);
 
     /**
      * @param row  The current table row key.

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
index d672803..678377d 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
@@ -27,8 +27,6 @@ import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
@@ -40,6 +38,8 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -77,7 +77,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 @InterfaceAudience.Public
 public class ImportTsv extends Configured implements Tool {
 
-  protected static final Log LOG = LogFactory.getLog(ImportTsv.class);
+  protected static final Logger LOG = LoggerFactory.getLogger(ImportTsv.class);
 
   final static String NAME = "importtsv";
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
index b7e9479..03834f2 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java
@@ -18,9 +18,9 @@
 
 package org.apache.hadoop.hbase.mapreduce;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
 @InterfaceAudience.Public
 @VisibleForTesting
 public class MultiTableHFileOutputFormat extends HFileOutputFormat2 {
-  private static final Log LOG = LogFactory.getLog(MultiTableHFileOutputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MultiTableHFileOutputFormat.class);
 
   /**
    * Creates a composite key to use as a mapper output key when using

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
index 82a86b4..d8205c1 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
@@ -22,9 +22,9 @@ import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
@@ -55,7 +55,7 @@ import java.util.Iterator;
 public abstract class MultiTableInputFormatBase extends
     InputFormat<ImmutableBytesWritable, Result> {
 
-  private static final Log LOG = LogFactory.getLog(MultiTableInputFormatBase.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MultiTableInputFormatBase.class);
 
   /** Holds the set of scans used to define the input. */
   private List<Scan> scans;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
index 4cf50f2..2a4fae9 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
@@ -22,9 +22,9 @@ import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.TableName;
@@ -72,7 +72,7 @@ public class MultiTableOutputFormat extends OutputFormat<ImmutableBytesWritable,
    */
   protected static class MultiTableRecordWriter extends
       RecordWriter<ImmutableBytesWritable, Mutation> {
-    private static final Log LOG = LogFactory.getLog(MultiTableRecordWriter.class);
+    private static final Logger LOG = LoggerFactory.getLogger(MultiTableRecordWriter.class);
     Connection connection;
     Map<ImmutableBytesWritable, BufferedMutator> mutatorMap = new HashMap<>();
     Configuration conf;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java
index b5cba64..97fafce 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java
@@ -18,28 +18,29 @@
 
 package org.apache.hadoop.hbase.mapreduce;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.io.IOException;
+import java.util.AbstractMap;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
 import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
 import org.apache.hadoop.hbase.util.ConfigurationUtil;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.AbstractMap;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
 
 /**
  * Shared implementation of mapreduce code over multiple table snapshots.
@@ -50,8 +51,8 @@ import java.util.UUID;
 @InterfaceAudience.LimitedPrivate({ "HBase" })
 @InterfaceStability.Evolving
 public class MultiTableSnapshotInputFormatImpl {
-
-  private static final Log LOG = LogFactory.getLog(MultiTableSnapshotInputFormatImpl.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(MultiTableSnapshotInputFormatImpl.class);
 
   public static final String RESTORE_DIRS_KEY =
       "hbase.MultiTableSnapshotInputFormat.restore.snapshotDirMapping";

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
index a505379..626deff 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
@@ -23,8 +23,6 @@ import java.lang.reflect.Method;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -41,6 +39,8 @@ import org.apache.hadoop.mapreduce.StatusReporter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 /**
@@ -58,7 +58,7 @@ import org.apache.hadoop.util.ReflectionUtils;
  */
 
 public class MultithreadedTableMapper<K2, V2> extends TableMapper<K2, V2> {
-  private static final Log LOG = LogFactory.getLog(MultithreadedTableMapper.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MultithreadedTableMapper.class);
   private Class<? extends Mapper<ImmutableBytesWritable, Result,K2,V2>> mapClass;
   private Context outer;
   private ExecutorService executor;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java
index 7da2f9b..317b328 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java
@@ -23,9 +23,9 @@ import java.util.List;
 import java.util.Map.Entry;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
@@ -39,7 +39,7 @@ import org.apache.hadoop.mapreduce.Reducer;
  */
 @InterfaceAudience.Public
 public class PutCombiner<K> extends Reducer<K, Put, K, Put> {
-  private static final Log LOG = LogFactory.getLog(PutCombiner.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PutCombiner.class);
 
   @Override
   protected void reduce(K row, Iterable<Put> vals, Context context)

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java
index 33f09cf..d1c5fc2 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RegionSizeCalculator.java
@@ -24,17 +24,18 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.RegionLoad;
 import org.apache.hadoop.hbase.ServerName;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 
 /**
  * Computes size of each region for given table and given column families.
@@ -43,7 +44,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 @InterfaceAudience.Private
 public class RegionSizeCalculator {
 
-  private static final Log LOG = LogFactory.getLog(RegionSizeCalculator.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RegionSizeCalculator.class);
 
   /**
    * Maps each region to its size in bytes.

http://git-wip-us.apache.org/repos/asf/hbase/blob/f572c4b8/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
index c9f3022..dac1d42 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java
@@ -25,13 +25,13 @@ import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
@@ -42,7 +42,7 @@ import org.apache.hadoop.io.serializer.Serializer;
 
 @InterfaceAudience.Public
 public class ResultSerialization extends Configured implements Serialization<Result> {
-  private static final Log LOG = LogFactory.getLog(ResultSerialization.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ResultSerialization.class);
   // The following configuration property indicates import file format version.
   public static final String IMPORT_FORMAT_VER = "hbase.import.version";
 

