hadoop-common-commits mailing list archives

From wan...@apache.org
Subject [12/50] [abbrv] hadoop git commit: MAPREDUCE-6998. Moving logging APIs over to slf4j in hadoop-mapreduce-client-jobclient. Contributed by Gergely Novák.
Date Sun, 10 Dec 2017 21:54:45 GMT
MAPREDUCE-6998. Moving logging APIs over to slf4j in hadoop-mapreduce-client-jobclient. Contributed by Gergely Novák.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d4cae977
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d4cae977
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d4cae977

Branch: refs/heads/YARN-6592
Commit: d4cae977a2471ad7b8f803617e41b6f94df19c11
Parents: 6cca5b3
Author: Akira Ajisaka <aajisaka@apache.org>
Authored: Thu Dec 7 16:21:25 2017 +0900
Committer: Akira Ajisaka <aajisaka@apache.org>
Committed: Thu Dec 7 16:21:25 2017 +0900
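
The change applies the same substitution throughout: the commons-logging Log/LogFactory pair is replaced with the SLF4J Logger/LoggerFactory pair. A minimal before/after sketch of the pattern, abridged from the ClientCache diff below (only the logging-related lines are shown):

  // Before: commons-logging
  import org.apache.commons.logging.Log;
  import org.apache.commons.logging.LogFactory;

  public class ClientCache {
    private static final Log LOG = LogFactory.getLog(ClientCache.class);
  }

  // After: SLF4J
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class ClientCache {
    private static final Logger LOG = LoggerFactory.getLogger(ClientCache.class);
  }

Because the SLF4J Logger methods accept a String (or a format string plus arguments) rather than an arbitrary Object, a few call sites also gain explicit toString() calls, as in the TestSlive hunk below.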

----------------------------------------------------------------------
 .../java/org/apache/hadoop/mapred/ClientCache.java   |  6 +++---
 .../apache/hadoop/mapred/ClientServiceDelegate.java  |  7 ++++---
 .../apache/hadoop/mapred/ResourceMgrDelegate.java    |  7 ++++---
 .../java/org/apache/hadoop/mapred/YARNRunner.java    |  6 +++---
 .../java/org/apache/hadoop/fi/ProbabilityModel.java  |  7 ++++---
 .../org/apache/hadoop/fs/AccumulatingReducer.java    |  8 +++++---
 .../test/java/org/apache/hadoop/fs/DFSCIOTest.java   |  6 +++---
 .../org/apache/hadoop/fs/DistributedFSCheck.java     |  7 ++++---
 .../java/org/apache/hadoop/fs/JHLogAnalyzer.java     |  7 ++++---
 .../test/java/org/apache/hadoop/fs/TestDFSIO.java    |  6 +++---
 .../src/test/java/org/apache/hadoop/fs/TestJHLA.java |  7 ++++---
 .../hadoop/fs/loadGenerator/LoadGeneratorMR.java     |  6 +++---
 .../java/org/apache/hadoop/fs/slive/AppendOp.java    |  6 +++---
 .../org/apache/hadoop/fs/slive/ConfigExtractor.java  |  7 ++++---
 .../java/org/apache/hadoop/fs/slive/CreateOp.java    |  6 +++---
 .../java/org/apache/hadoop/fs/slive/DeleteOp.java    |  6 +++---
 .../test/java/org/apache/hadoop/fs/slive/ListOp.java |  6 +++---
 .../java/org/apache/hadoop/fs/slive/MkdirOp.java     |  6 +++---
 .../test/java/org/apache/hadoop/fs/slive/ReadOp.java |  6 +++---
 .../java/org/apache/hadoop/fs/slive/RenameOp.java    |  6 +++---
 .../org/apache/hadoop/fs/slive/ReportWriter.java     |  6 +++---
 .../java/org/apache/hadoop/fs/slive/SleepOp.java     |  6 +++---
 .../java/org/apache/hadoop/fs/slive/SliveMapper.java |  6 +++---
 .../org/apache/hadoop/fs/slive/SliveReducer.java     |  6 +++---
 .../java/org/apache/hadoop/fs/slive/SliveTest.java   |  6 +++---
 .../java/org/apache/hadoop/fs/slive/TestSlive.java   | 10 +++++-----
 .../java/org/apache/hadoop/fs/slive/TruncateOp.java  |  6 +++---
 .../org/apache/hadoop/fs/slive/WeightSelector.java   |  7 ++++---
 .../test/java/org/apache/hadoop/hdfs/NNBench.java    |  7 +++----
 .../org/apache/hadoop/hdfs/NNBenchWithoutMR.java     |  8 ++++----
 .../java/org/apache/hadoop/mapred/BigMapOutput.java  |  8 ++++----
 .../test/java/org/apache/hadoop/mapred/MRBench.java  |  6 +++---
 .../java/org/apache/hadoop/mapred/MiniMRCluster.java | 15 ++++++++-------
 .../hadoop/mapred/MiniMRYarnClusterAdapter.java      |  7 ++++---
 .../org/apache/hadoop/mapred/ReliabilityTest.java    | 13 +++++++------
 .../org/apache/hadoop/mapred/TestBadRecords.java     |  8 ++++----
 .../org/apache/hadoop/mapred/TestClientRedirect.java |  7 ++++---
 .../hadoop/mapred/TestCombineFileInputFormat.java    | 11 +++++------
 .../mapred/TestCombineSequenceFileInputFormat.java   |  8 ++++----
 .../hadoop/mapred/TestCombineTextInputFormat.java    |  8 ++++----
 .../mapred/TestConcatenatedCompressedInput.java      |  9 +++++----
 .../hadoop/mapred/TestFixedLengthInputFormat.java    | 11 ++++++-----
 .../org/apache/hadoop/mapred/TestJobCleanup.java     |  9 +++++----
 .../apache/hadoop/mapred/TestJobSysDirWithDFS.java   |  8 ++++----
 .../hadoop/mapred/TestKeyValueTextInputFormat.java   |  8 +++++---
 .../hadoop/mapred/TestMRTimelineEventHandling.java   |  8 ++++----
 .../org/apache/hadoop/mapred/TestMapProgress.java    |  8 +++++---
 .../apache/hadoop/mapred/TestMiniMRChildTask.java    |  8 ++++----
 .../hadoop/mapred/TestMultiFileInputFormat.java      |  8 +++++---
 .../mapred/TestSequenceFileAsBinaryOutputFormat.java | 13 ++++++-------
 .../org/apache/hadoop/mapred/TestSortedRanges.java   | 12 ++++++------
 .../mapred/TestSpecialCharactersInOutputPath.java    | 15 +++++++--------
 .../org/apache/hadoop/mapred/TestTaskStatus.java     |  4 +---
 .../apache/hadoop/mapred/TestTextInputFormat.java    | 11 +++++------
 .../org/apache/hadoop/mapred/TestYARNRunner.java     | 14 ++++++++------
 .../apache/hadoop/mapred/ThreadedMapBenchmark.java   |  7 ++++---
 .../java/org/apache/hadoop/mapred/UtilsForTests.java |  6 +++---
 .../mapred/jobcontrol/TestLocalJobControl.java       |  9 +++++----
 .../org/apache/hadoop/mapred/pipes/TestPipes.java    |  8 ++++----
 .../org/apache/hadoop/mapreduce/GrowingSleepJob.java | 12 ++++++------
 .../hadoop/mapreduce/JobHistoryFileParser.java       |  7 ++++---
 .../hadoop/mapreduce/JobHistoryFileReplayHelper.java |  9 ++++-----
 .../mapreduce/JobHistoryFileReplayMapperV1.java      |  9 ++++-----
 .../mapreduce/JobHistoryFileReplayMapperV2.java      |  8 ++++----
 .../apache/hadoop/mapreduce/MapReduceTestUtil.java   | 13 ++++---------
 .../hadoop/mapreduce/MiniHadoopClusterManager.java   |  8 ++++----
 .../hadoop/mapreduce/SimpleEntityWriterV1.java       |  7 ++++---
 .../hadoop/mapreduce/SimpleEntityWriterV2.java       |  9 +++++----
 .../org/apache/hadoop/mapreduce/TestCounters.java    | 11 +++++------
 .../org/apache/hadoop/mapreduce/TestLocalRunner.java | 11 ++++++-----
 .../org/apache/hadoop/mapreduce/TestMRJobClient.java |  7 ++++---
 .../apache/hadoop/mapreduce/TestMapCollection.java   | 13 ++++++-------
 .../apache/hadoop/mapreduce/TestValueIterReset.java  |  8 ++++----
 .../hadoop/mapreduce/TimelineEntityConverterV1.java  |  8 ++++----
 .../hadoop/mapreduce/TimelineEntityConverterV2.java  |  8 ++++----
 .../lib/db/TestDataDrivenDBInputFormat.java          |  6 +++---
 .../input/TestCombineSequenceFileInputFormat.java    |  8 ++++----
 .../lib/input/TestCombineTextInputFormat.java        |  8 ++++----
 .../lib/input/TestFixedLengthInputFormat.java        | 12 ++++++------
 .../lib/input/TestMRKeyValueTextInputFormat.java     | 12 +++++-------
 .../lib/input/TestMRSequenceFileInputFilter.java     |  8 ++++----
 .../lib/jobcontrol/TestMapReduceJobControl.java      | 12 ++++++------
 .../TestMRSequenceFileAsBinaryOutputFormat.java      |  8 ++++----
 .../mapreduce/lib/partition/TestKeyFieldHelper.java  | 10 ++++++----
 .../hadoop/mapreduce/security/TestJHSSecurity.java   | 10 +++++-----
 .../hadoop/mapreduce/util/MRAsyncDiskService.java    |  7 ++++---
 .../mapreduce/util/TestMRAsyncDiskService.java       |  9 ++++-----
 .../hadoop/mapreduce/v2/MiniMRYarnCluster.java       |  7 ++++---
 .../v2/TestMRAMWithNonNormalizedCapabilities.java    |  7 ++++---
 .../hadoop/mapreduce/v2/TestMRAppWithCombiner.java   |  7 ++++---
 .../org/apache/hadoop/mapreduce/v2/TestMRJobs.java   |  6 +++---
 .../mapreduce/v2/TestMRJobsWithHistoryService.java   |  8 ++++----
 .../hadoop/mapreduce/v2/TestMRJobsWithProfiler.java  |  8 ++++----
 .../apache/hadoop/mapreduce/v2/TestMROldApiJobs.java |  7 ++++---
 .../org/apache/hadoop/mapreduce/v2/TestRMNMInfo.java |  7 ++++---
 .../mapreduce/v2/TestSpeculativeExecution.java       |  7 ++++---
 .../org/apache/hadoop/mapreduce/v2/TestUberAM.java   |  6 +++---
 .../src/test/java/testjar/UserNamePermission.java    | 13 ++++---------
 98 files changed, 411 insertions(+), 394 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java
index 93ea5c4..8268d1e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java
@@ -23,8 +23,6 @@ import java.security.PrivilegedAction;
 import java.util.HashMap;
 import java.util.Map;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.mapreduce.JobID;
@@ -35,13 +33,15 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class ClientCache {
 
   private final Configuration conf;
   private final ResourceMgrDelegate rm;
 
-  private static final Log LOG = LogFactory.getLog(ClientCache.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ClientCache.class);
 
   private Map<JobID, ClientServiceDelegate> cache = 
       new HashMap<JobID, ClientServiceDelegate>();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
index 72339e5..792e496 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
@@ -29,8 +29,6 @@ import java.util.List;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.ipc.RPC;
@@ -79,11 +77,14 @@ import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
 import org.apache.hadoop.yarn.security.client.ClientToAMTokenIdentifier;
 import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
 
 public class ClientServiceDelegate {
-  private static final Log LOG = LogFactory.getLog(ClientServiceDelegate.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ClientServiceDelegate.class);
   private static final String UNAVAILABLE = "N/A";
 
   // Caches for per-user NotRunningJobs

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
index 94f741a..ac4b73b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
@@ -25,8 +25,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -78,11 +76,14 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
 import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
 
 public class ResourceMgrDelegate extends YarnClient {
-  private static final Log LOG = LogFactory.getLog(ResourceMgrDelegate.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ResourceMgrDelegate.class);
       
   private YarnConfiguration conf;
   private ApplicationSubmissionContext application;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
index 12a3079..127e1dc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
@@ -36,8 +36,6 @@ import java.util.Vector;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
@@ -99,6 +97,8 @@ import org.apache.hadoop.yarn.security.client.RMDelegationTokenSelector;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.util.UnitsConversionUtil;
 import org.apache.hadoop.yarn.util.resource.ResourceUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -108,7 +108,7 @@ import com.google.common.annotations.VisibleForTesting;
 @SuppressWarnings("unchecked")
 public class YARNRunner implements ClientProtocol {
 
-  private static final Log LOG = LogFactory.getLog(YARNRunner.class);
+  private static final Logger LOG = LoggerFactory.getLogger(YARNRunner.class);
 
   private static final String RACK_GROUP = "rack";
   private static final String NODE_IF_RACK_GROUP = "node1";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fi/ProbabilityModel.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fi/ProbabilityModel.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fi/ProbabilityModel.java
index 48e57f5..ce3f1fa 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fi/ProbabilityModel.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fi/ProbabilityModel.java
@@ -19,9 +19,9 @@ package org.apache.hadoop.fi;
 
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class is responsible for the decision of when a fault 
@@ -42,7 +42,8 @@ import org.apache.hadoop.conf.Configuration;
  */
 public class ProbabilityModel {
   private static Random generator = new Random();
-  private static final Log LOG = LogFactory.getLog(ProbabilityModel.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ProbabilityModel.class);
 
   static final String FPROB_NAME = "fi.";
   private static final String ALL_PROBABILITIES = FPROB_NAME + "*";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/AccumulatingReducer.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/AccumulatingReducer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/AccumulatingReducer.java
index 3991d94..f6c2a06 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/AccumulatingReducer.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/AccumulatingReducer.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.fs;
 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Reducer that accumulates values based on their type.
@@ -47,7 +47,9 @@ public class AccumulatingReducer extends MapReduceBase
   static final String VALUE_TYPE_LONG = "l:";
   static final String VALUE_TYPE_FLOAT = "f:";
   static final String VALUE_TYPE_STRING = "s:";
-  private static final Log LOG = LogFactory.getLog(AccumulatingReducer.class);
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AccumulatingReducer.class);
   
   protected String hostName;
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
index b01954e..d718556 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
@@ -28,8 +28,6 @@ import java.io.PrintStream;
 import java.util.Date;
 import java.util.StringTokenizer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.SequenceFile;
@@ -38,6 +36,8 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.*;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
  /**
  * Distributed i/o benchmark.
@@ -69,7 +69,7 @@ import org.junit.Test;
 @Ignore
 public class DFSCIOTest {
   // Constants
-  private static final Log LOG = LogFactory.getLog(DFSCIOTest.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DFSCIOTest.class);
   private static final int TEST_TYPE_READ = 0;
   private static final int TEST_TYPE_WRITE = 1;
   private static final int TEST_TYPE_CLEANUP = 2;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DistributedFSCheck.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DistributedFSCheck.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DistributedFSCheck.java
index 67ef5d9..8ed2c19 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DistributedFSCheck.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DistributedFSCheck.java
@@ -33,8 +33,6 @@ import java.util.Vector;
 
 import junit.framework.TestCase;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.SequenceFile;
@@ -42,6 +40,8 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.*;
 import org.junit.Ignore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Distributed checkup of the file system consistency.
@@ -56,7 +56,8 @@ import org.junit.Ignore;
 @Ignore
 public class DistributedFSCheck extends TestCase {
   // Constants
-  private static final Log LOG = LogFactory.getLog(DistributedFSCheck.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DistributedFSCheck.class);
   private static final int TEST_TYPE_READ = 0;
   private static final int TEST_TYPE_CLEANUP = 2;
   private static final int DEFAULT_BUFFER_SIZE = 1000000;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/JHLogAnalyzer.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/JHLogAnalyzer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/JHLogAnalyzer.java
index 91c3c26..5e3e745 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/JHLogAnalyzer.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/JHLogAnalyzer.java
@@ -34,8 +34,6 @@ import java.util.Map;
 import java.util.StringTokenizer;
 import java.util.HashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.SequenceFile;
@@ -46,6 +44,8 @@ import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Job History Log Analyzer.
@@ -144,7 +144,8 @@ import org.apache.hadoop.util.StringUtils;
  */
 @SuppressWarnings("deprecation")
 public class JHLogAnalyzer {
-  private static final Log LOG = LogFactory.getLog(JHLogAnalyzer.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(JHLogAnalyzer.class);
   // Constants
   private static final String JHLA_ROOT_DIR = 
                             System.getProperty("test.build.data", "stats/JHLA");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
index 61485be..68befea 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
@@ -33,8 +33,6 @@ import java.util.Collection;
 import java.util.Date;
 import java.util.Random;
 import java.util.StringTokenizer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
@@ -62,6 +60,8 @@ import org.apache.hadoop.util.ToolRunner;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Distributed i/o benchmark.
@@ -92,7 +92,7 @@ import org.junit.Test;
  */
 public class TestDFSIO implements Tool {
   // Constants
-  private static final Log LOG = LogFactory.getLog(TestDFSIO.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestDFSIO.class);
   private static final int DEFAULT_BUFFER_SIZE = 1000000;
   private static final String BASE_FILE_NAME = "test_io_";
   private static final String DEFAULT_RES_FILE_NAME = "TestDFSIO_results.log";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
index 31950fd..9334a8a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestJHLA.java
@@ -23,11 +23,11 @@ import java.io.FileOutputStream;
 import java.io.OutputStreamWriter;
 import java.io.File;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Test Job History Log Analyzer.
@@ -35,7 +35,8 @@ import org.junit.Test;
  * @see JHLogAnalyzer
  */
 public class TestJHLA {
-  private static final Log LOG = LogFactory.getLog(JHLogAnalyzer.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(JHLogAnalyzer.class);
   private String historyLog = System.getProperty("test.build.data", 
                                   "build/test/data") + "/history/test.log";
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGeneratorMR.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGeneratorMR.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGeneratorMR.java
index c47d971..044c77c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGeneratorMR.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGeneratorMR.java
@@ -26,8 +26,6 @@ import java.net.UnknownHostException;
 import java.util.EnumSet;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.CreateFlag;
@@ -50,6 +48,8 @@ import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /** The load generator is a tool for testing NameNode behavior under
  * different client loads.
@@ -63,7 +63,7 @@ import org.apache.hadoop.util.ToolRunner;
  *
  */
 public class LoadGeneratorMR extends LoadGenerator {
-  public static final Log LOG = LogFactory.getLog(LoadGenerator.class);
+  public static final Logger LOG = LoggerFactory.getLogger(LoadGenerator.class);
   private static int numMapTasks = 1;
   private String mrOutDir;
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/AppendOp.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/AppendOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/AppendOp.java
index 82d221f..b118e6e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/AppendOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/AppendOp.java
@@ -24,12 +24,12 @@ import java.io.OutputStream;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.slive.DataWriter.GenerateOutput;
 import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Operation which selects a random file and appends a random amount of bytes
@@ -41,7 +41,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
  */
 class AppendOp extends Operation {
 
-  private static final Log LOG = LogFactory.getLog(AppendOp.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AppendOp.class);
 
   AppendOp(ConfigExtractor cfg, Random rnd) {
     super(AppendOp.class.getSimpleName(), cfg, rnd);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ConfigExtractor.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ConfigExtractor.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ConfigExtractor.java
index ef4e436..2668770 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ConfigExtractor.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ConfigExtractor.java
@@ -22,12 +22,12 @@ import java.text.NumberFormat;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.slive.Constants.OperationType;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Simple access layer onto of a configuration object that extracts the slive
@@ -35,7 +35,8 @@ import org.apache.hadoop.util.StringUtils;
  */
 class ConfigExtractor {
 
-  private static final Log LOG = LogFactory.getLog(ConfigExtractor.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ConfigExtractor.class);
 
   private Configuration config;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/CreateOp.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/CreateOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/CreateOp.java
index 7da32c7..d7d2989 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/CreateOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/CreateOp.java
@@ -22,13 +22,13 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.slive.DataWriter.GenerateOutput;
 import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Operation which selects a random file and a random number of bytes to create
@@ -42,7 +42,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
  */
 class CreateOp extends Operation {
 
-  private static final Log LOG = LogFactory.getLog(CreateOp.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CreateOp.class);
 
   private static int DEF_IO_BUFFER_SIZE = 4096;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/DeleteOp.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/DeleteOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/DeleteOp.java
index 31afd16..0ed425a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/DeleteOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/DeleteOp.java
@@ -23,11 +23,11 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Operation which selects a random file and attempts to delete that file (if it
@@ -39,7 +39,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
  */
 class DeleteOp extends Operation {
 
-  private static final Log LOG = LogFactory.getLog(DeleteOp.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DeleteOp.class);
 
   DeleteOp(ConfigExtractor cfg, Random rnd) {
     super(DeleteOp.class.getSimpleName(), cfg, rnd);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ListOp.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ListOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ListOp.java
index f83a6bd..46377d5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ListOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ListOp.java
@@ -23,12 +23,12 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Operation which selects a random directory and attempts to list that
@@ -41,7 +41,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
  */
 class ListOp extends Operation {
 
-  private static final Log LOG = LogFactory.getLog(ListOp.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ListOp.class);
 
   ListOp(ConfigExtractor cfg, Random rnd) {
     super(ListOp.class.getSimpleName(), cfg, rnd);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/MkdirOp.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/MkdirOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/MkdirOp.java
index 585f418..0f24d14 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/MkdirOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/MkdirOp.java
@@ -23,11 +23,11 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Operation which selects a random directory and attempts to create that
@@ -40,7 +40,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
  */
 class MkdirOp extends Operation {
 
-  private static final Log LOG = LogFactory.getLog(MkdirOp.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MkdirOp.class);
 
   MkdirOp(ConfigExtractor cfg, Random rnd) {
     super(MkdirOp.class.getSimpleName(), cfg, rnd);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReadOp.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReadOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReadOp.java
index 9683323..d6f29a6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReadOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReadOp.java
@@ -24,12 +24,12 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.slive.DataVerifier.VerifyOutput;
 import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Operation which selects a random file and selects a random read size (from
@@ -43,7 +43,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
  * number of failures and the amount of time taken to fail
  */
 class ReadOp extends Operation {
-  private static final Log LOG = LogFactory.getLog(ReadOp.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ReadOp.class);
 
   ReadOp(ConfigExtractor cfg, Random rnd) {
     super(ReadOp.class.getSimpleName(), cfg, rnd);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/RenameOp.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/RenameOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/RenameOp.java
index 94d6db4..a608a87 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/RenameOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/RenameOp.java
@@ -23,11 +23,11 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Operation which selects a random file and a second random file and attempts
@@ -60,7 +60,7 @@ class RenameOp extends Operation {
     }
   }
 
-  private static final Log LOG = LogFactory.getLog(RenameOp.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RenameOp.class);
 
   RenameOp(ConfigExtractor cfg, Random rnd) {
     super(RenameOp.class.getSimpleName(), cfg, rnd);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReportWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReportWriter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReportWriter.java
index 1f9abde..873a2ffd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReportWriter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ReportWriter.java
@@ -24,8 +24,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class which provides a report for the given operation output
@@ -48,7 +48,7 @@ class ReportWriter {
   static final String NOT_FOUND = "files_not_found";
   static final String BAD_FILES = "bad_files";
 
-  private static final Log LOG = LogFactory.getLog(ReportWriter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ReportWriter.class);
 
   private static final String SECTION_DELIM = "-------------";
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SleepOp.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SleepOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SleepOp.java
index 9fc9b30..2b9d54e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SleepOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SleepOp.java
@@ -21,10 +21,10 @@ package org.apache.hadoop.fs.slive;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Operation which sleeps for a given number of milliseconds according to the
@@ -32,7 +32,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
  */
 class SleepOp extends Operation {
 
-  private static final Log LOG = LogFactory.getLog(SleepOp.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SleepOp.class);
 
   SleepOp(ConfigExtractor cfg, Random rnd) {
     super(SleepOp.class.getSimpleName(), cfg, rnd);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveMapper.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveMapper.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveMapper.java
index e02c5d9..93ee9f9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveMapper.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveMapper.java
@@ -22,8 +22,6 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
 import org.apache.hadoop.io.Text;
@@ -35,6 +33,8 @@ import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The slive class which sets up the mapper to be used which itself will receive
@@ -45,7 +45,7 @@ import org.apache.hadoop.util.StringUtils;
 public class SliveMapper extends MapReduceBase implements
     Mapper<Object, Object, Text, Text> {
 
-  private static final Log LOG = LogFactory.getLog(SliveMapper.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SliveMapper.class);
 
   private static final String OP_TYPE = SliveMapper.class.getSimpleName();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveReducer.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveReducer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveReducer.java
index 323559c..d4f2473 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveReducer.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveReducer.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.fs.slive;
 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MapReduceBase;
@@ -30,6 +28,8 @@ import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The slive reducer which iterates over the given input values and merges them
@@ -38,7 +38,7 @@ import org.apache.hadoop.util.StringUtils;
 public class SliveReducer extends MapReduceBase implements
     Reducer<Text, Text, Text, Text> {
 
-  private static final Log LOG = LogFactory.getLog(SliveReducer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SliveReducer.class);
 
   private ConfigExtractor config;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
index 97360d6..c2918ab 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java
@@ -30,8 +30,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -45,6 +43,8 @@ import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Slive test entry point + main program
@@ -61,7 +61,7 @@ import org.apache.hadoop.util.ToolRunner;
  */
 public class SliveTest implements Tool {
 
-  private static final Log LOG = LogFactory.getLog(SliveTest.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SliveTest.class);
 
   // ensures the hdfs configurations are loaded if they exist
   static {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java
index 25e3340..575cd0b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java
@@ -31,8 +31,6 @@ import java.util.List;
 import java.util.Random;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -43,13 +41,15 @@ import org.apache.hadoop.fs.slive.DataWriter.GenerateOutput;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Junit 4 test for slive
  */
 public class TestSlive {
 
-  private static final Log LOG = LogFactory.getLog(TestSlive.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestSlive.class);
 
   private static final Random rnd = new Random(1L);
 
@@ -258,13 +258,13 @@ public class TestSlive {
     DataWriter writer = new DataWriter(rnd);
     FileOutputStream fs = new FileOutputStream(fn);
     GenerateOutput ostat = writer.writeSegment(byteAm, fs);
-    LOG.info(ostat);
+    LOG.info(ostat.toString());
     fs.close();
     assertTrue(ostat.getBytesWritten() == byteAm);
     DataVerifier vf = new DataVerifier();
     FileInputStream fin = new FileInputStream(fn);
     VerifyOutput vfout = vf.verifyFile(byteAm, new DataInputStream(fin));
-    LOG.info(vfout);
+    LOG.info(vfout.toString());
     fin.close();
     assertEquals(vfout.getBytesRead(), byteAm);
     assertTrue(vfout.getChunksDifferent() == 0);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TruncateOp.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TruncateOp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TruncateOp.java
index 202d807..295b797 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TruncateOp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TruncateOp.java
@@ -22,12 +22,12 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.List;
 import java.util.Random;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Operation which selects a random file and truncates a random amount of bytes
@@ -40,7 +40,7 @@ import org.apache.hadoop.fs.slive.OperationOutput.OutputType;
  */
 class TruncateOp extends Operation {
 
-  private static final Log LOG = LogFactory.getLog(TruncateOp.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TruncateOp.class);
 
   TruncateOp(ConfigExtractor cfg, Random rnd) {
     super(TruncateOp.class.getSimpleName(), cfg, rnd);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/WeightSelector.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/WeightSelector.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/WeightSelector.java
index d8acc39..3d80357 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/WeightSelector.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/WeightSelector.java
@@ -26,12 +26,12 @@ import java.util.Map;
 import java.util.Random;
 import java.util.TreeMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.slive.Constants.Distribution;
 import org.apache.hadoop.fs.slive.Constants.OperationType;
 import org.apache.hadoop.fs.slive.Weights.UniformWeight;
 import org.apache.hadoop.fs.slive.ObserveableOp.Observer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class is the main handler that selects operations to run using the
@@ -47,7 +47,8 @@ class WeightSelector {
     Double weight(int elapsed, int duration);
   }
 
-  private static final Log LOG = LogFactory.getLog(WeightSelector.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(WeightSelector.class);
 
   private static class OperationInfo {
     Integer amountLeft;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBench.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBench.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBench.java
index 29eac43..2346c3c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBench.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBench.java
@@ -30,8 +30,6 @@ import java.util.Date;
 import java.util.Iterator;
 import java.util.StringTokenizer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -57,6 +55,8 @@ import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This program executes a specified operation that applies load to 
@@ -78,8 +78,7 @@ import org.apache.hadoop.util.ToolRunner;
  */
 
 public class NNBench extends Configured implements Tool {
-  private static final Log LOG = LogFactory.getLog(
-          "org.apache.hadoop.hdfs.NNBench");
+  private static final Logger LOG = LoggerFactory.getLogger(NNBench.class);
   
   private static String CONTROL_DIR_NAME = "control";
   private static String OUTPUT_DIR_NAME = "output";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBenchWithoutMR.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBenchWithoutMR.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBenchWithoutMR.java
index 9b63010..af16177 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBenchWithoutMR.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/hdfs/NNBenchWithoutMR.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hdfs;
 import java.io.IOException;
 import java.util.Date;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -30,6 +28,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.mapred.JobConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This program executes a specified operation that applies load to 
@@ -45,8 +45,8 @@ import org.apache.hadoop.mapred.JobConf;
  */
 public class NNBenchWithoutMR {
   
-  private static final Log LOG = LogFactory.getLog(
-                                            "org.apache.hadoop.hdfs.NNBench");
+  private static final Logger LOG =
+      LoggerFactory.getLogger(NNBenchWithoutMR.class);
   
   // variable initialzed from command line arguments
   private static long startTime = 0;

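A side effect worth noting in the two NNBench hunks above: both classes previously used the string-named logger "org.apache.hadoop.hdfs.NNBench". With the class-based getLogger call, NNBench keeps the same category (its fully qualified class name matches the old string), while NNBenchWithoutMR now logs under its own category. SLF4J supports both forms, so either category could still be obtained if a log configuration depends on the old name; a minimal sketch (hypothetical class name, for illustration only):

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class LoggerNamingExample {
    // Per-class category, as NNBenchWithoutMR uses after this change.
    private static final Logger PER_CLASS =
        LoggerFactory.getLogger(LoggerNamingExample.class);
    // String-named category; this form would preserve the previously shared
    // "org.apache.hadoop.hdfs.NNBench" category.
    private static final Logger SHARED =
        LoggerFactory.getLogger("org.apache.hadoop.hdfs.NNBench");

    public static void main(String[] args) {
      PER_CLASS.info("logging under {}", PER_CLASS.getName());
      SHARED.info("logging under {}", SHARED.getName());
    }
  }
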
http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/BigMapOutput.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/BigMapOutput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/BigMapOutput.java
index 14e32fd..964673b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/BigMapOutput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/BigMapOutput.java
@@ -22,8 +22,6 @@ import java.io.IOException;
 import java.util.Date;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -37,10 +35,12 @@ import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class BigMapOutput extends Configured implements Tool {
-  public static final Log LOG =
-    LogFactory.getLog(BigMapOutput.class.getName());
+  public static final Logger LOG =
+      LoggerFactory.getLogger(BigMapOutput.class);
   private static Random random = new Random();
   public static String MIN_KEY = "mapreduce.bmo.minkey";
   public static String MIN_VALUE = "mapreduce.bmo.minvalue";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRBench.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRBench.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRBench.java
index 5286e86..5328756 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRBench.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRBench.java
@@ -24,8 +24,6 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -33,13 +31,15 @@ import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Runs a job multiple times and takes average of all runs.
  */
 public class MRBench extends Configured implements Tool{
   
-  private static final Log LOG = LogFactory.getLog(MRBench.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MRBench.class);
   private static final String DEFAULT_INPUT_SUB = "mr_input";
   private static final String DEFAULT_OUTPUT_SUB = "mr_output";
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRCluster.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRCluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRCluster.java
index 2e144414..e7df5b3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRCluster.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRCluster.java
@@ -20,13 +20,13 @@ package org.apache.hadoop.mapred;
 import java.io.IOException;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class is an MR2 replacement for older MR1 MiniMRCluster, that was used
@@ -45,7 +45,8 @@ import org.apache.hadoop.security.UserGroupInformation;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class MiniMRCluster {
-  private static final Log LOG = LogFactory.getLog(MiniMRCluster.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(MiniMRCluster.class);
 
   private MiniMRClientCluster mrClientCluster;
 
@@ -98,7 +99,7 @@ public class MiniMRCluster {
     try {
       jobConf = new JobConf(mrClientCluster.getConfig());
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error(e.getMessage());
     }
     return jobConf;
   }
@@ -108,7 +109,7 @@ public class MiniMRCluster {
     try {
       jobConf = new JobConf(mrClientCluster.getConfig());
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error(e.getMessage());
     }
     return jobConf;
   }
@@ -224,7 +225,7 @@ public class MiniMRCluster {
     try {
       jobConf = new JobConf(mrClientCluster.getConfig());
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error(e.getMessage());
     }
     return jobConf;
   }
@@ -266,7 +267,7 @@ public class MiniMRCluster {
     try {
       mrClientCluster.stop();
     } catch (IOException e) {
-      LOG.error(e);
+      LOG.error(e.getMessage());
     }
   }
 

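The LOG.error(e) -> LOG.error(e.getMessage()) changes above follow from the same API difference: SLF4J has no error(Object) overload. Logging only getMessage() records the message without the stack trace, whereas SLF4J's error(String, Throwable) keeps both. A minimal illustrative sketch (hypothetical class, not part of this patch):

  import java.io.IOException;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class ErrorLoggingExample {
    private static final Logger LOG =
        LoggerFactory.getLogger(ErrorLoggingExample.class);

    public static void main(String[] args) {
      try {
        throw new IOException("config unavailable");
      } catch (IOException e) {
        LOG.error(e.getMessage());               // message only, as in the hunk above
        LOG.error("failed to build JobConf", e); // message plus stack trace
      }
    }
  }
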
http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRYarnClusterAdapter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRYarnClusterAdapter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRYarnClusterAdapter.java
index 94d6ff3..4f89840 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRYarnClusterAdapter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRYarnClusterAdapter.java
@@ -18,13 +18,13 @@
 
 package org.apache.hadoop.mapred;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.service.Service.STATE;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * An adapter for MiniMRYarnCluster providing a MiniMRClientCluster interface.
@@ -34,7 +34,8 @@ public class MiniMRYarnClusterAdapter implements MiniMRClientCluster {
 
   private MiniMRYarnCluster miniMRYarnCluster;
 
-  private static final Log LOG = LogFactory.getLog(MiniMRYarnClusterAdapter.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(MiniMRYarnClusterAdapter.class);
 
   public MiniMRYarnClusterAdapter(MiniMRYarnCluster miniMRYarnCluster) {
     this.miniMRYarnCluster = miniMRYarnCluster;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
index 983a4a7..303857b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
@@ -29,8 +29,6 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.StringTokenizer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -41,6 +39,8 @@ import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class tests reliability of the framework in the face of failures of
@@ -73,7 +73,8 @@ import org.apache.hadoop.util.ToolRunner;
 public class ReliabilityTest extends Configured implements Tool {
 
   private String dir;
-  private static final Log LOG = LogFactory.getLog(ReliabilityTest.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ReliabilityTest.class);
 
   private void displayUsage() {
     LOG.info("This must be run in only the distributed mode " +
@@ -207,7 +208,7 @@ public class ReliabilityTest extends Configured implements Tool {
               args);
           checkJobExitStatus(status, jobClass);
         } catch (Exception e) {
-          LOG.fatal("JOB " + jobClass + " failed to run");
+          LOG.error("JOB " + jobClass + " failed to run");
           System.exit(-1);
         }
       }
@@ -325,7 +326,7 @@ public class ReliabilityTest extends Configured implements Tool {
           killed = true;
           return;
         } catch (Exception e) {
-          LOG.fatal(StringUtils.stringifyException(e));
+          LOG.error(StringUtils.stringifyException(e));
         }
       }
     }
@@ -495,7 +496,7 @@ public class ReliabilityTest extends Configured implements Tool {
         } catch (InterruptedException ie) {
           killed = true;
         } catch (Exception e) {
-          LOG.fatal(StringUtils.stringifyException(e));
+          LOG.error(StringUtils.stringifyException(e));
         }
       }
     }

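The fatal() -> error() changes above reflect that SLF4J defines no FATAL level (its levels run TRACE through ERROR). If the fatal nature of an event needs to stay visible to downstream filters or layouts, SLF4J Markers are one option; a minimal illustrative sketch (hypothetical class and marker usage, not part of this patch):

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  import org.slf4j.Marker;
  import org.slf4j.MarkerFactory;

  public class FatalMarkerExample {
    private static final Logger LOG =
        LoggerFactory.getLogger(FatalMarkerExample.class);
    private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

    public static void main(String[] args) {
      LOG.error(FATAL, "JOB {} failed to run", "ExampleJob");
    }
  }
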
http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
index c2d6257..b45a2a6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
@@ -30,8 +30,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.StringTokenizer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
@@ -41,6 +39,8 @@ import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
@@ -48,8 +48,8 @@ import static org.junit.Assert.assertNotNull;
 @Ignore
 public class TestBadRecords extends ClusterMapReduceTestCase {
   
-  private static final Log LOG = 
-    LogFactory.getLog(TestBadRecords.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestBadRecords.class);
   
   private static final List<String> MAPPER_BAD_RECORDS = 
     Arrays.asList("hello01","hello04","hello05");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
index a9b4626..f97d0a4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
@@ -24,8 +24,6 @@ import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.mapreduce.Cluster;
@@ -144,6 +142,8 @@ import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestClientRedirect {
 
@@ -151,7 +151,8 @@ public class TestClientRedirect {
     DefaultMetricsSystem.setMiniClusterMode(true);
   }
 
-  private static final Log LOG = LogFactory.getLog(TestClientRedirect.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestClientRedirect.class);
   private static final String RMADDRESS = "0.0.0.0:8054";
   private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java
index de7880d..4ed9eb2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineFileInputFormat.java
@@ -26,16 +26,15 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
 import org.apache.hadoop.mapred.lib.CombineFileSplit;
 import org.apache.hadoop.mapred.lib.CombineFileRecordReader;
-
 import org.junit.Test;
-import static org.junit.Assert.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import static org.junit.Assert.*;
 
 public class TestCombineFileInputFormat {
-  private static final Log LOG =
-    LogFactory.getLog(TestCombineFileInputFormat.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestCombineFileInputFormat.class);
   
   private static JobConf defaultConf = new JobConf();
   private static FileSystem localFs = null; 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java
index 8cdaa80..4f1d6ba 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java
@@ -25,8 +25,6 @@ import java.io.IOException;
 import java.util.BitSet;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -36,10 +34,12 @@ import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.mapred.lib.CombineFileSplit;
 import org.apache.hadoop.mapred.lib.CombineSequenceFileInputFormat;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestCombineSequenceFileInputFormat {
-  private static final Log LOG =
-    LogFactory.getLog(TestCombineSequenceFileInputFormat.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestCombineSequenceFileInputFormat.class);
 
   private static Configuration conf = new Configuration();
   private static FileSystem localFs = null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java
index 581e62b..394630c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineTextInputFormat.java
@@ -31,8 +31,6 @@ import java.util.BitSet;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
@@ -43,10 +41,12 @@ import org.apache.hadoop.mapred.lib.CombineFileSplit;
 import org.apache.hadoop.mapred.lib.CombineTextInputFormat;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestCombineTextInputFormat {
-  private static final Log LOG =
-    LogFactory.getLog(TestCombineTextInputFormat.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestCombineTextInputFormat.class);
 
   private static JobConf defaultConf = new JobConf();
   private static FileSystem localFs = null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
index 15d651d..977d083 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
@@ -30,8 +30,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.zip.Inflater;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
@@ -46,10 +44,13 @@ import org.apache.hadoop.util.ReflectionUtils;
 import org.junit.After;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 @Ignore
 public class TestConcatenatedCompressedInput {
-  private static final Log LOG =
-    LogFactory.getLog(TestConcatenatedCompressedInput.class.getName());
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestConcatenatedCompressedInput.class);
   private static int MAX_LENGTH = 10000;
   private static JobConf defaultConf = new JobConf();
   private static FileSystem localFs = null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
index 8013feb..4864dd0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
@@ -26,8 +26,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -35,14 +33,18 @@ import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.compress.*;
 import org.apache.hadoop.util.ReflectionUtils;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import static org.junit.Assert.*;
 
 public class TestFixedLengthInputFormat {
 
-  private static Log LOG;
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestFixedLengthInputFormat.class);
+
   private static Configuration defaultConf;
   private static FileSystem localFs; 
   private static Path workDir;
@@ -55,7 +57,6 @@ public class TestFixedLengthInputFormat {
   @BeforeClass
   public static void onlyOnce() {
     try {
-      LOG = LogFactory.getLog(TestFixedLengthInputFormat.class.getName());
       defaultConf = new Configuration();
       defaultConf.set("fs.defaultFS", "file:///");
       localFs = FileSystem.getLocal(defaultConf);

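The TestFixedLengthInputFormat hunk above also changes how the logger is created: instead of a mutable field assigned in the @BeforeClass method, it becomes a static final constant initialized at class load time, the usual SLF4J pattern. A minimal sketch of that pattern (hypothetical class, for illustration only):

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class EagerLoggerExample {
    // Eager, final, class-based logger; no lazy assignment in setup code needed.
    private static final Logger LOG =
        LoggerFactory.getLogger(EagerLoggerExample.class);

    public static void main(String[] args) {
      LOG.info("logger is available as soon as the class is loaded");
    }
  }
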
http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4cae977/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java
index bf762d9..13f2301 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobCleanup.java
@@ -22,9 +22,6 @@ import java.io.DataOutputStream;
 import java.io.File;
 import java.io.IOException;
 
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.Log;
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
@@ -36,6 +33,9 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.LoggerFactory;
+import org.slf4j.Logger;
+
 import static org.junit.Assert.*;
 
 /**
@@ -54,7 +54,8 @@ public class TestJobCleanup {
   private static Path emptyInDir = null;
   private static int outDirs = 0;
 
-  private static Log LOG = LogFactory.getLog(TestJobCleanup.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestJobCleanup.class);
 
   @BeforeClass
   public static void setUp() throws IOException {

