crunch-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mkw...@apache.org
Subject [2/2] git commit: CRUNCH-476: Consolidated use of logging in projects to use SLF4J, dependency cleanup, and a few javadoc fixes.
Date Tue, 28 Oct 2014 01:09:16 GMT
CRUNCH-476: Consolidated use of logging in projects to use SLF4J, dependency cleanup, and a few javadoc fixes.

CRUNCH-476: Uplifted logging to use slf4j formatting vs pre-formatting messages, also removed dependency on commons-logging.


Project: http://git-wip-us.apache.org/repos/asf/crunch/repo
Commit: http://git-wip-us.apache.org/repos/asf/crunch/commit/82cecc0c
Tree: http://git-wip-us.apache.org/repos/asf/crunch/tree/82cecc0c
Diff: http://git-wip-us.apache.org/repos/asf/crunch/diff/82cecc0c

Branch: refs/heads/master
Commit: 82cecc0cea64cb3cd47d6bc0c9562e49509b34ca
Parents: 0157d03
Author: Micah Whitacre <mkwhit@apache.org>
Authored: Sat Oct 25 21:13:29 2014 -0500
Committer: Micah Whitacre <mkwhit@apache.org>
Committed: Sun Oct 26 21:04:00 2014 -0500

----------------------------------------------------------------------
 crunch-contrib/pom.xml                          |  5 ++++
 .../contrib/text/AbstractSimpleExtractor.java   | 10 +++-----
 crunch-core/pom.xml                             |  5 ----
 .../impl/dist/collect/UnionCollectionIT.java    | 16 ++++++------
 .../org/apache/crunch/PipelineCallable.java     |  9 +++----
 .../mapreduce/TaskAttemptContextFactory.java    | 10 ++++----
 .../lib/jobcontrol/CrunchControlledJob.java     | 14 +++++------
 .../lib/jobcontrol/CrunchJobControl.java        |  6 ++---
 .../TaskInputOutputContextFactory.java          |  8 +++---
 .../crunch/impl/dist/DistributedPipeline.java   |  6 ++---
 .../org/apache/crunch/impl/mem/MemPipeline.java | 14 +++++------
 .../org/apache/crunch/impl/mr/MRPipeline.java   |  8 +++---
 .../impl/mr/collect/PGroupedTableImpl.java      |  8 +++---
 .../apache/crunch/impl/mr/exec/MRExecutor.java  |  6 ++---
 .../apache/crunch/impl/mr/plan/MSCRPlanner.java |  6 ++---
 .../apache/crunch/impl/mr/run/CrunchMapper.java |  6 ++---
 .../crunch/impl/mr/run/CrunchReducer.java       |  6 ++---
 .../org/apache/crunch/impl/mr/run/RTNode.java   |  8 +++---
 .../crunch/io/avro/AvroFileReaderFactory.java   |  9 +++----
 .../crunch/io/avro/AvroPathPerKeyTarget.java    |  8 +++---
 .../io/avro/trevni/TrevniFileReaderFactory.java |  8 +++---
 .../apache/crunch/io/impl/FileSourceImpl.java   | 13 +++++-----
 .../apache/crunch/io/impl/FileTargetImpl.java   | 26 ++++++++++----------
 .../crunch/io/seq/SeqFileReaderFactory.java     | 11 ++++-----
 .../crunch/io/text/TextFileReaderFactory.java   |  8 +++---
 .../io/text/csv/CSVFileReaderFactory.java       |  8 +++---
 .../crunch/io/text/csv/CSVFileSource.java       |  2 +-
 .../crunch/io/text/csv/CSVReadableData.java     |  6 ++---
 .../crunch/io/text/csv/CSVRecordIterator.java   |  6 ++---
 .../lib/join/BloomFilterJoinStrategy.java       |  5 ----
 .../materialize/MaterializableIterable.java     |  8 +++---
 .../org/apache/crunch/types/avro/Avros.java     | 10 ++++----
 .../apache/crunch/types/writable/Writables.java | 11 ++++-----
 crunch-examples/pom.xml                         | 10 ++------
 .../crunch/examples/WordAggregationHBase.java   |  6 ++---
 crunch-hbase/pom.xml                            | 10 ++------
 .../crunch/io/hbase/HBaseSourceTarget.java      | 11 +++------
 .../org/apache/crunch/io/hbase/HBaseTarget.java |  6 ++---
 .../crunch/io/hbase/HFileInputFormat.java       | 21 ++++++++--------
 .../io/hbase/HFileOutputFormatForCrunch.java    | 12 ++++-----
 .../org/apache/crunch/io/hbase/HFileSource.java | 10 ++++----
 .../org/apache/crunch/io/hbase/HFileUtils.java  | 10 ++++----
 .../apache/crunch/io/hbase/HTableIterator.java  |  8 +++---
 .../org/apache/crunch/types/orc/OrcUtils.java   |  2 +-
 crunch-spark/pom.xml                            |  5 ++++
 .../apache/crunch/impl/spark/SparkPipeline.java |  6 ++---
 .../apache/crunch/impl/spark/SparkRuntime.java  | 10 ++++----
 .../impl/spark/collect/PGroupedTableImpl.java   |  6 ++---
 pom.xml                                         |  7 ------
 49 files changed, 199 insertions(+), 231 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-contrib/pom.xml
----------------------------------------------------------------------
diff --git a/crunch-contrib/pom.xml b/crunch-contrib/pom.xml
index b3e4f54..2fed6a7 100644
--- a/crunch-contrib/pom.xml
+++ b/crunch-contrib/pom.xml
@@ -34,6 +34,11 @@ under the License.
       <groupId>org.apache.crunch</groupId>
       <artifactId>crunch-core</artifactId>
     </dependency>
+
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
     
     <dependency>
       <groupId>org.apache.crunch</groupId>

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-contrib/src/main/java/org/apache/crunch/contrib/text/AbstractSimpleExtractor.java
----------------------------------------------------------------------
diff --git a/crunch-contrib/src/main/java/org/apache/crunch/contrib/text/AbstractSimpleExtractor.java b/crunch-contrib/src/main/java/org/apache/crunch/contrib/text/AbstractSimpleExtractor.java
index aebc60a..d981f23 100644
--- a/crunch-contrib/src/main/java/org/apache/crunch/contrib/text/AbstractSimpleExtractor.java
+++ b/crunch-contrib/src/main/java/org/apache/crunch/contrib/text/AbstractSimpleExtractor.java
@@ -17,8 +17,8 @@
  */
 package org.apache.crunch.contrib.text;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Base class for the common case {@code Extractor} instances that construct a single
@@ -27,7 +27,7 @@ import org.apache.commons.logging.LogFactory;
  */
 public abstract class AbstractSimpleExtractor<T> implements Extractor<T> {
 
-  private static final Log LOG = LogFactory.getLog(AbstractSimpleExtractor.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AbstractSimpleExtractor.class);
   private static final int LOG_ERROR_LIMIT = 100;
   
   private int errors;
@@ -60,9 +60,7 @@ public abstract class AbstractSimpleExtractor<T> implements Extractor<T> {
       errorOnLast = true;
       errors++;
       if (errors < LOG_ERROR_LIMIT) {
-        String msg = String.format("Error occurred parsing input '%s' using extractor %s",
-            input, this); 
-        LOG.error(msg);
+        LOG.error("Error occurred parsing input '{}' using extractor {}", input, this);
       }
     }
     return res;

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/pom.xml
----------------------------------------------------------------------
diff --git a/crunch-core/pom.xml b/crunch-core/pom.xml
index 78b5eaa..c61940e 100644
--- a/crunch-core/pom.xml
+++ b/crunch-core/pom.xml
@@ -130,11 +130,6 @@ under the License.
       <artifactId>libthrift</artifactId>
       <scope>provided</scope>
     </dependency>
-
-    <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-    </dependency>
    
     <dependency>
       <groupId>org.slf4j</groupId>

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/it/java/org/apache/crunch/impl/dist/collect/UnionCollectionIT.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/it/java/org/apache/crunch/impl/dist/collect/UnionCollectionIT.java b/crunch-core/src/it/java/org/apache/crunch/impl/dist/collect/UnionCollectionIT.java
index 51b3dda..f45b7fa 100644
--- a/crunch-core/src/it/java/org/apache/crunch/impl/dist/collect/UnionCollectionIT.java
+++ b/crunch-core/src/it/java/org/apache/crunch/impl/dist/collect/UnionCollectionIT.java
@@ -26,8 +26,6 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.PCollection;
 import org.apache.crunch.PTableKeyValueIT;
 import org.apache.crunch.Pipeline;
@@ -49,13 +47,15 @@ import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 
 import com.google.common.collect.Lists;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @RunWith(value = Parameterized.class)
 public class UnionCollectionIT {
   @Rule
   public TemporaryPath tmpDir = TemporaryPaths.create();
 
-  private static final Log LOG = LogFactory.getLog(UnionCollectionIT.class);
+  private static final Logger LOG = LoggerFactory.getLogger(UnionCollectionIT.class);
 
   private PTypeFamily typeFamily;
   private Pipeline pipeline;
@@ -78,9 +78,9 @@ public class UnionCollectionIT {
     PCollection<String> firstCollection = pipeline.read(At.textFile(inputFile1, typeFamily.strings()));
     PCollection<String> secondCollection = pipeline.read(At.textFile(inputFile2, typeFamily.strings()));
 
-    LOG.info("Test fixture: [" + pipeline.getClass().getSimpleName() + " : " + typeFamily.getClass().getSimpleName()
-        + "]  First: " + Lists.newArrayList(firstCollection.materialize().iterator()) + ", Second: "
-        + Lists.newArrayList(secondCollection.materialize().iterator()));
+    LOG.info("Test fixture: [ {} : {}]  First: {}, Second: {}", new Object[]{pipeline.getClass().getSimpleName()
+        ,typeFamily.getClass().getSimpleName(), Lists.newArrayList(firstCollection.materialize().iterator()),
+        Lists.newArrayList(secondCollection.materialize().iterator())});
 
     union = secondCollection.union(firstCollection);
   }
@@ -107,7 +107,7 @@ public class UnionCollectionIT {
   private void checkMaterialized(Iterable<String> materialized) {
     List<String> materializedValues = Lists.newArrayList(materialized.iterator());
     Collections.sort(materializedValues);
-    LOG.info("Materialized union: " + materializedValues);
+    LOG.info("Materialized union: {}", materializedValues);
     assertEquals(EXPECTED, materializedValues);
   }
 
@@ -148,7 +148,7 @@ public class UnionCollectionIT {
 
     Collections.sort(fileContentValues);
 
-    LOG.info("Saved Union: " + fileContentValues);
+    LOG.info("Saved Union: {}", fileContentValues);
     assertEquals(EXPECTED, fileContentValues);
   }
 }

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/PipelineCallable.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/PipelineCallable.java b/crunch-core/src/main/java/org/apache/crunch/PipelineCallable.java
index e1b16aa..9f6829e 100644
--- a/crunch-core/src/main/java/org/apache/crunch/PipelineCallable.java
+++ b/crunch-core/src/main/java/org/apache/crunch/PipelineCallable.java
@@ -20,9 +20,9 @@ package org.apache.crunch;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Maps;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import parquet.Preconditions;
 
 import java.util.Map;
@@ -76,7 +76,7 @@ import java.util.concurrent.Callable;
  */
 public abstract class PipelineCallable<Output> implements Callable<PipelineCallable.Status> {
 
-  private static final Log LOG = LogFactory.getLog(PipelineCallable.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PipelineCallable.class);
 
   public enum Status { SUCCESS, FAILURE };
 
@@ -183,8 +183,7 @@ public abstract class PipelineCallable<Output> implements Callable<PipelineCalla
    */
   public String getMessage() {
     if (message == null) {
-      LOG.warn("No message specified for PipelineCallable instance \"" + getName() +
-          "\". Consider overriding PipelineCallable.getMessage()");
+      LOG.warn("No message specified for PipelineCallable instance \"{}\". Consider overriding PipelineCallable.getMessage()", getName());
       return toString();
     }
     return message;

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/TaskAttemptContextFactory.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/TaskAttemptContextFactory.java b/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/TaskAttemptContextFactory.java
index 887c051..256fa42 100644
--- a/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/TaskAttemptContextFactory.java
+++ b/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/TaskAttemptContextFactory.java
@@ -19,11 +19,11 @@ package org.apache.crunch.hadoop.mapreduce;
 
 import java.lang.reflect.Constructor;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A factory class that allows us to hide the fact that {@code TaskAttemptContext} is a class in
@@ -32,7 +32,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptID;
 @SuppressWarnings("unchecked")
 public class TaskAttemptContextFactory {
 
-  private static final Log LOG = LogFactory.getLog(TaskAttemptContextFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TaskAttemptContextFactory.class);
 
   private static final TaskAttemptContextFactory INSTANCE = new TaskAttemptContextFactory();
 
@@ -49,13 +49,13 @@ public class TaskAttemptContextFactory {
         implClass = (Class<TaskAttemptContext>) Class.forName(
             "org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
       } catch (ClassNotFoundException e) {
-        LOG.fatal("Could not find TaskAttemptContextImpl class, exiting", e);
+        LOG.error("Could not find TaskAttemptContextImpl class, exiting", e);
       }
     }
     try {
       this.taskAttemptConstructor = implClass.getConstructor(Configuration.class, TaskAttemptID.class);
     } catch (Exception e) {
-      LOG.fatal("Could not access TaskAttemptContext constructor, exiting", e);
+      LOG.error("Could not access TaskAttemptContext constructor, exiting", e);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/CrunchControlledJob.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/CrunchControlledJob.java b/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/CrunchControlledJob.java
index ce80691..dceb217 100644
--- a/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/CrunchControlledJob.java
+++ b/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/CrunchControlledJob.java
@@ -21,8 +21,6 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.Target;
 import org.apache.crunch.impl.mr.MRJob;
 import org.apache.crunch.impl.mr.plan.JobNameBuilder;
@@ -35,6 +33,8 @@ import org.apache.hadoop.util.StringUtils;
 import com.google.common.base.Function;
 import com.google.common.base.Objects;
 import com.google.common.collect.Lists;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class encapsulates a MapReduce job and its dependency. It monitors the
@@ -52,7 +52,7 @@ public class CrunchControlledJob implements MRJob {
     public void run() throws IOException;
   }
 
-  private static final Log LOG = LogFactory.getLog(CrunchControlledJob.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CrunchControlledJob.class);
 
   private final int jobID;
   private final Job job; // mapreduce job to be executed.
@@ -339,12 +339,12 @@ public class CrunchControlledJob implements MRJob {
       this.jobStartTimeMsec = System.currentTimeMillis();
       job.submit();
       this.state = State.RUNNING;
-      LOG.info("Running job \"" + getJobName() + "\"");
-      LOG.info("Job status available at: " + job.getTrackingURL());
+      LOG.info("Running job \"{}\"", getJobName());
+      LOG.info("Job status available at: {}", job.getTrackingURL());
     } catch (Exception ioe) {
       this.state = State.FAILED;
       this.message = StringUtils.stringifyException(ioe);
-      LOG.info("Error occurred starting job \"" + getJobName() + "\":");
+      LOG.info("Error occurred starting job \"{}\":", getJobName());
       LOG.info(getMessage());
     }
   }
@@ -353,7 +353,7 @@ public class CrunchControlledJob implements MRJob {
     String progress = String.format("map %.0f%% reduce %.0f%%",
         100.0 * job.mapProgress(), 100.0 * job.reduceProgress());
     if (!Objects.equal(lastKnownProgress, progress)) {
-      LOG.info(job.getJobName() + " progress: " + progress);
+      LOG.info("{} progress: {}", job.getJobName(), progress);
       lastKnownProgress = progress;
     }
   }

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/CrunchJobControl.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/CrunchJobControl.java b/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/CrunchJobControl.java
index d23d821..aac6296 100644
--- a/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/CrunchJobControl.java
+++ b/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/CrunchJobControl.java
@@ -33,13 +33,13 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.PipelineCallable;
 import org.apache.crunch.Target;
 import org.apache.crunch.impl.mr.MRJob.State;
 import org.apache.crunch.impl.mr.run.RuntimeParameters;
 import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class encapsulates a set of MapReduce jobs and its dependency.
@@ -62,7 +62,7 @@ public class CrunchJobControl {
   private Set<PipelineCallable<?>> activePipelineCallables;
   private List<PipelineCallable<?>> failedCallables;
 
-  private Log log = LogFactory.getLog(CrunchJobControl.class);
+  private Logger log = LoggerFactory.getLogger(CrunchJobControl.class);
 
   private final String groupName;
   private final int maxRunningJobs;

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/TaskInputOutputContextFactory.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/TaskInputOutputContextFactory.java b/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/TaskInputOutputContextFactory.java
index 1aa65b3..d005c75 100644
--- a/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/TaskInputOutputContextFactory.java
+++ b/crunch-core/src/main/java/org/apache/crunch/hadoop/mapreduce/lib/jobcontrol/TaskInputOutputContextFactory.java
@@ -17,8 +17,6 @@
  */
 package org.apache.crunch.hadoop.mapreduce.lib.jobcontrol;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.OutputCommitter;
@@ -27,11 +25,13 @@ import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.StatusReporter;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TaskInputOutputContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.lang.reflect.Constructor;
 
 public class TaskInputOutputContextFactory {
-  private static final Log LOG = LogFactory.getLog(TaskInputOutputContextFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TaskInputOutputContextFactory.class);
 
   private static final TaskInputOutputContextFactory INSTANCE = new TaskInputOutputContextFactory();
 
@@ -56,7 +56,7 @@ public class TaskInputOutputContextFactory {
           OutputCommitter.class, StatusReporter.class, InputSplit.class);
       this.arity = 7;
     } catch (Exception e) {
-      LOG.fatal("Could not access TaskInputOutputContext constructor, exiting", e);
+      LOG.error("Could not access TaskInputOutputContext constructor, exiting", e);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/impl/dist/DistributedPipeline.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/impl/dist/DistributedPipeline.java b/crunch-core/src/main/java/org/apache/crunch/impl/dist/DistributedPipeline.java
index 6b3da5e..2d236ae 100644
--- a/crunch-core/src/main/java/org/apache/crunch/impl/dist/DistributedPipeline.java
+++ b/crunch-core/src/main/java/org/apache/crunch/impl/dist/DistributedPipeline.java
@@ -19,8 +19,6 @@ package org.apache.crunch.impl.dist;
 
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.CrunchRuntimeException;
 import org.apache.crunch.MapFn;
 import org.apache.crunch.PCollection;
@@ -54,6 +52,8 @@ import org.apache.crunch.types.writable.Writables;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.Map;
@@ -61,7 +61,7 @@ import java.util.Random;
 import java.util.Set;
 
 public abstract class DistributedPipeline implements Pipeline {
-  private static final Log LOG = LogFactory.getLog(DistributedPipeline.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DistributedPipeline.class);
 
   private static final Random RANDOM = new Random();
 

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/impl/mem/MemPipeline.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/impl/mem/MemPipeline.java b/crunch-core/src/main/java/org/apache/crunch/impl/mem/MemPipeline.java
index 5996bfa..23b9e04 100644
--- a/crunch-core/src/main/java/org/apache/crunch/impl/mem/MemPipeline.java
+++ b/crunch-core/src/main/java/org/apache/crunch/impl/mem/MemPipeline.java
@@ -27,8 +27,6 @@ import java.util.concurrent.TimeoutException;
 import com.google.common.base.Charsets;
 import org.apache.avro.file.DataFileWriter;
 import org.apache.avro.io.DatumWriter;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.CachingOptions;
 import org.apache.crunch.CrunchRuntimeException;
 import org.apache.crunch.PCollection;
@@ -65,10 +63,12 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.AbstractFuture;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class MemPipeline implements Pipeline {
 
-  private static final Log LOG = LogFactory.getLog(MemPipeline.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MemPipeline.class);
   private static Counters COUNTERS = new CountersWrapper();
   private static final MemPipeline INSTANCE = new MemPipeline();
 
@@ -155,7 +155,7 @@ public class MemPipeline implements Pipeline {
         throw new IllegalStateException(e);
       }
     }
-    LOG.error("Source " + source + " is not readable");
+    LOG.error("Source {} is not readable", source);
     throw new IllegalStateException("Source " + source + " is not readable");
   }
 
@@ -166,11 +166,11 @@ public class MemPipeline implements Pipeline {
         Iterable<Pair<K, V>> iterable = ((ReadableSource<Pair<K, V>>) source).read(conf);
         return new MemTable<K, V>(iterable, source.getTableType(), source.toString());
       } catch (IOException e) {
-        LOG.error("Exception reading source: " + source.toString(), e);
+        LOG.error("Exception reading source: " + source, e);
         throw new IllegalStateException(e);
       }
     }
-    LOG.error("Source " + source + " is not readable");
+    LOG.error("Source {} is not readable", source);
     throw new IllegalStateException("Source " + source + " is not readable");
   }
 
@@ -236,7 +236,7 @@ public class MemPipeline implements Pipeline {
         LOG.error("Exception writing target: " + target, e);
       }
     } else {
-      LOG.error("Target " + target + " is not a PathTarget instance");
+      LOG.error("Target {} is not a PathTarget instance", target);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/impl/mr/MRPipeline.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/impl/mr/MRPipeline.java b/crunch-core/src/main/java/org/apache/crunch/impl/mr/MRPipeline.java
index bf3f58a..1c48c62 100644
--- a/crunch-core/src/main/java/org/apache/crunch/impl/mr/MRPipeline.java
+++ b/crunch-core/src/main/java/org/apache/crunch/impl/mr/MRPipeline.java
@@ -27,8 +27,6 @@ import java.util.Map;
 
 import com.google.common.base.Charsets;
 import com.google.common.collect.Maps;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.CachingOptions;
 import org.apache.crunch.CrunchRuntimeException;
 import org.apache.crunch.PCollection;
@@ -46,13 +44,15 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Pipeline implementation that is executed within Hadoop MapReduce.
  */
 public class MRPipeline extends DistributedPipeline {
 
-  private static final Log LOG = LogFactory.getLog(MRPipeline.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MRPipeline.class);
 
   private final Class<?> jarClass;
 
@@ -176,7 +176,7 @@ public class MRPipeline extends DistributedPipeline {
         final int maxPipeNameLength = 150;
         String filenamePrefix = encodedName.substring(0, Math.min(maxPipeNameLength, encodedName.length()));
         Path jobPlanPath = new Path(uri.getPath(), filenamePrefix + filenameSuffix);
-        LOG.info("Writing jobplan to " + jobPlanPath);
+        LOG.info("Writing jobplan to {}", jobPlanPath);
         outputStream = fs.create(jobPlanPath, true);
         outputStream.write(dotFileContents.getBytes(Charsets.UTF_8));
       } catch (URISyntaxException e) {

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/impl/mr/collect/PGroupedTableImpl.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/impl/mr/collect/PGroupedTableImpl.java b/crunch-core/src/main/java/org/apache/crunch/impl/mr/collect/PGroupedTableImpl.java
index 23578ee..0c8de36 100644
--- a/crunch-core/src/main/java/org/apache/crunch/impl/mr/collect/PGroupedTableImpl.java
+++ b/crunch-core/src/main/java/org/apache/crunch/impl/mr/collect/PGroupedTableImpl.java
@@ -17,8 +17,6 @@
  */
 package org.apache.crunch.impl.mr.collect;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.GroupingOptions;
 import org.apache.crunch.impl.dist.collect.BaseGroupedTable;
 import org.apache.crunch.impl.dist.collect.MRCollection;
@@ -26,10 +24,12 @@ import org.apache.crunch.impl.dist.collect.PTableBase;
 import org.apache.crunch.impl.mr.plan.DoNode;
 import org.apache.crunch.util.PartitionUtils;
 import org.apache.hadoop.mapreduce.Job;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class PGroupedTableImpl<K, V> extends BaseGroupedTable<K, V> implements MRCollection {
 
-  private static final Log LOG = LogFactory.getLog(PGroupedTableImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PGroupedTableImpl.class);
 
   PGroupedTableImpl(PTableBase<K, V> parent, GroupingOptions groupingOptions) {
     super(parent, groupingOptions);
@@ -41,7 +41,7 @@ public class PGroupedTableImpl<K, V> extends BaseGroupedTable<K, V> implements M
       int numReduceTasks = PartitionUtils.getRecommendedPartitions(this, getPipeline().getConfiguration());
       if (numReduceTasks > 0) {
         job.setNumReduceTasks(numReduceTasks);
-        LOG.info(String.format("Setting num reduce tasks to %d", numReduceTasks));
+        LOG.info("Setting num reduce tasks to {}", numReduceTasks);
       } else {
         LOG.warn("Attempted to set a negative number of reduce tasks");
       }

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/impl/mr/exec/MRExecutor.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/impl/mr/exec/MRExecutor.java b/crunch-core/src/main/java/org/apache/crunch/impl/mr/exec/MRExecutor.java
index 22f2d1b..2cefc04 100644
--- a/crunch-core/src/main/java/org/apache/crunch/impl/mr/exec/MRExecutor.java
+++ b/crunch-core/src/main/java/org/apache/crunch/impl/mr/exec/MRExecutor.java
@@ -20,8 +20,6 @@ package org.apache.crunch.impl.mr.exec;
 import com.google.common.base.Function;
 import com.google.common.collect.Lists;
 import com.google.common.util.concurrent.AbstractFuture;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.PipelineCallable;
 import org.apache.crunch.PipelineResult;
 import org.apache.crunch.SourceTarget;
@@ -33,6 +31,8 @@ import org.apache.crunch.impl.mr.MRJob;
 import org.apache.crunch.impl.mr.MRPipelineExecution;
 import org.apache.crunch.materialize.MaterializableIterable;
 import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.List;
 import java.util.Map;
@@ -54,7 +54,7 @@ import java.util.concurrent.atomic.AtomicReference;
  */
 public class MRExecutor extends AbstractFuture<PipelineResult> implements MRPipelineExecution {
 
-  private static final Log LOG = LogFactory.getLog(MRExecutor.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MRExecutor.class);
 
   private final CrunchJobControl control;
   private final Map<PCollectionImpl<?>, Set<Target>> outputTargets;

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/impl/mr/plan/MSCRPlanner.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/impl/mr/plan/MSCRPlanner.java b/crunch-core/src/main/java/org/apache/crunch/impl/mr/plan/MSCRPlanner.java
index 7a1ff4e..70acb59 100644
--- a/crunch-core/src/main/java/org/apache/crunch/impl/mr/plan/MSCRPlanner.java
+++ b/crunch-core/src/main/java/org/apache/crunch/impl/mr/plan/MSCRPlanner.java
@@ -25,8 +25,6 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.PipelineCallable;
 import org.apache.crunch.Source;
 import org.apache.crunch.SourceTarget;
@@ -45,10 +43,12 @@ import com.google.common.collect.Maps;
 import com.google.common.collect.Multimap;
 import com.google.common.collect.Sets;
 import com.google.common.collect.ImmutableMultimap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class MSCRPlanner {
 
-  private static final Log LOG = LogFactory.getLog(MSCRPlanner.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MSCRPlanner.class);
 
   private final MRPipeline pipeline;
   private final Map<PCollectionImpl<?>, Set<Target>> outputs;

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/CrunchMapper.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/CrunchMapper.java b/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/CrunchMapper.java
index 0e2ef38..4c88c23 100644
--- a/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/CrunchMapper.java
+++ b/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/CrunchMapper.java
@@ -19,13 +19,13 @@ package org.apache.crunch.impl.mr.run;
 
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.Mapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class CrunchMapper extends Mapper<Object, Object, Object, Object> {
 
-  private static final Log LOG = LogFactory.getLog(CrunchMapper.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CrunchMapper.class);
 
   private RTNode node;
   private CrunchTaskContext ctxt;

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/CrunchReducer.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/CrunchReducer.java b/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/CrunchReducer.java
index c3e3e3e..53e925d 100644
--- a/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/CrunchReducer.java
+++ b/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/CrunchReducer.java
@@ -17,14 +17,14 @@
  */
 package org.apache.crunch.impl.mr.run;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.impl.SingleUseIterable;
 import org.apache.hadoop.mapreduce.Reducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class CrunchReducer extends Reducer<Object, Object, Object, Object> {
 
-  private static final Log LOG = LogFactory.getLog(CrunchReducer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CrunchReducer.class);
 
   private RTNode node;
   private CrunchTaskContext ctxt;

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/RTNode.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/RTNode.java b/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/RTNode.java
index 5a7949e..650d0c9 100644
--- a/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/RTNode.java
+++ b/crunch-core/src/main/java/org/apache/crunch/impl/mr/run/RTNode.java
@@ -20,8 +20,6 @@ package org.apache.crunch.impl.mr.run;
 import java.io.Serializable;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.CrunchRuntimeException;
 import org.apache.crunch.DoFn;
 import org.apache.crunch.Emitter;
@@ -31,10 +29,12 @@ import org.apache.crunch.impl.mr.emit.OutputEmitter;
 import org.apache.crunch.types.Converter;
 import org.apache.crunch.types.PType;
 import org.apache.hadoop.conf.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class RTNode implements Serializable {
 
-  private static final Log LOG = LogFactory.getLog(RTNode.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RTNode.class);
 
   private final String nodeName;
   private DoFn<Object, Object> fn;
@@ -98,7 +98,7 @@ public class RTNode implements Serializable {
       fn.process(input, emitter);
     } catch (CrunchRuntimeException e) {
       if (!e.wasLogged()) {
-        LOG.info(String.format("Crunch exception in '%s' for input: %s", nodeName, input), e);
+        LOG.info("Crunch exception in '{}' for input: {}", new Object[]{nodeName, input, e});
         e.markLogged();
       }
       throw e;

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/io/avro/AvroFileReaderFactory.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/io/avro/AvroFileReaderFactory.java b/crunch-core/src/main/java/org/apache/crunch/io/avro/AvroFileReaderFactory.java
index 5128fd6..0f8b656 100644
--- a/crunch-core/src/main/java/org/apache/crunch/io/avro/AvroFileReaderFactory.java
+++ b/crunch-core/src/main/java/org/apache/crunch/io/avro/AvroFileReaderFactory.java
@@ -24,10 +24,7 @@ import org.apache.avro.Schema;
 import org.apache.avro.file.DataFileReader;
 import org.apache.avro.io.DatumReader;
 import org.apache.avro.mapred.FsInput;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.MapFn;
-import org.apache.crunch.fn.IdentityFn;
 import org.apache.crunch.io.FileReaderFactory;
 import org.apache.crunch.io.impl.AutoClosingIterator;
 import org.apache.crunch.types.avro.AvroMode;
@@ -38,10 +35,12 @@ import org.apache.hadoop.fs.Path;
 
 import com.google.common.collect.Iterators;
 import com.google.common.collect.UnmodifiableIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class AvroFileReaderFactory<T> implements FileReaderFactory<T> {
 
-  private static final Log LOG = LogFactory.getLog(AvroFileReaderFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AvroFileReaderFactory.class);
 
   private DatumReader<T> reader;
   private final AvroType<?> atype;
@@ -85,7 +84,7 @@ public class AvroFileReaderFactory<T> implements FileReaderFactory<T> {
         }
       });
     } catch (IOException e) {
-      LOG.info("Could not read avro file at path: " + path, e);
+      LOG.info("Could not read avro file at path: {}", path, e);
       return Iterators.emptyIterator();
     }
   }

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/io/avro/AvroPathPerKeyTarget.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/io/avro/AvroPathPerKeyTarget.java b/crunch-core/src/main/java/org/apache/crunch/io/avro/AvroPathPerKeyTarget.java
index 5c47b8a..76a2cf7 100644
--- a/crunch-core/src/main/java/org/apache/crunch/io/avro/AvroPathPerKeyTarget.java
+++ b/crunch-core/src/main/java/org/apache/crunch/io/avro/AvroPathPerKeyTarget.java
@@ -18,8 +18,6 @@
 package org.apache.crunch.io.avro;
 
 import org.apache.avro.mapred.AvroWrapper;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.impl.mr.plan.PlanningParameters;
 import org.apache.crunch.io.FileNamingScheme;
 import org.apache.crunch.io.FormatBundle;
@@ -37,6 +35,8 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 
@@ -49,7 +49,7 @@ import java.io.IOException;
  */
 public class AvroPathPerKeyTarget extends FileTargetImpl {
 
-  private static final Log LOG = LogFactory.getLog(AvroPathPerKeyTarget.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AvroPathPerKeyTarget.class);
 
   public AvroPathPerKeyTarget(String path) {
     this(new Path(path));
@@ -94,7 +94,7 @@ public class AvroPathPerKeyTarget extends FileTargetImpl {
     FileSystem srcFs = workingPath.getFileSystem(conf);
     Path base = new Path(workingPath, PlanningParameters.MULTI_OUTPUT_PREFIX + index);
     if (!srcFs.exists(base)) {
-      LOG.warn("Nothing to copy from " + base);
+      LOG.warn("Nothing to copy from {}", base);
       return;
     }
     Path[] keys = FileUtil.stat2Paths(srcFs.listStatus(base));

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/io/avro/trevni/TrevniFileReaderFactory.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/io/avro/trevni/TrevniFileReaderFactory.java b/crunch-core/src/main/java/org/apache/crunch/io/avro/trevni/TrevniFileReaderFactory.java
index 15bf7c1..3caa586 100644
--- a/crunch-core/src/main/java/org/apache/crunch/io/avro/trevni/TrevniFileReaderFactory.java
+++ b/crunch-core/src/main/java/org/apache/crunch/io/avro/trevni/TrevniFileReaderFactory.java
@@ -23,8 +23,6 @@ import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.reflect.ReflectData;
 import org.apache.avro.specific.SpecificData;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.CrunchRuntimeException;
 import org.apache.crunch.MapFn;
 import org.apache.crunch.fn.IdentityFn;
@@ -37,13 +35,15 @@ import org.apache.hadoop.fs.Path;
 import org.apache.trevni.Input;
 import org.apache.trevni.avro.AvroColumnReader;
 import org.apache.trevni.avro.HadoopInput;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.Iterator;
 
 public class TrevniFileReaderFactory<T> implements FileReaderFactory<T> {
 
-  private static final Log LOG = LogFactory.getLog(TrevniFileReaderFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TrevniFileReaderFactory.class);
   private final AvroType<T> aType;
   private final MapFn<T, T> mapFn;
   private final Schema schema;
@@ -99,7 +99,7 @@ public class TrevniFileReaderFactory<T> implements FileReaderFactory<T> {
         }
       });
     } catch (IOException e) {
-      LOG.info("Could not read avro file at path: " + path, e);
+      LOG.info("Could not read avro file at path: {}", path, e);
       return Iterators.emptyIterator();
     }
   }

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/io/impl/FileSourceImpl.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/io/impl/FileSourceImpl.java b/crunch-core/src/main/java/org/apache/crunch/io/impl/FileSourceImpl.java
index 9917bad..b42d815 100644
--- a/crunch-core/src/main/java/org/apache/crunch/io/impl/FileSourceImpl.java
+++ b/crunch-core/src/main/java/org/apache/crunch/io/impl/FileSourceImpl.java
@@ -24,8 +24,6 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.Source;
 import org.apache.crunch.impl.mr.run.CrunchInputFormat;
 import org.apache.crunch.io.CompositePathIterable;
@@ -40,10 +38,12 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class FileSourceImpl<T> implements Source<T> {
 
-  private static final Log LOG = LogFactory.getLog(FileSourceImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FileSourceImpl.class);
 
   @Deprecated
   protected final Path path;
@@ -74,8 +74,7 @@ public class FileSourceImpl<T> implements Source<T> {
   @Deprecated
   public Path getPath() {
     if (paths.size() > 1) {
-      LOG.warn("getPath() called for source with multiple paths, only " +
-          "returning first. Source: " + this);
+      LOG.warn("getPath() called for source with multiple paths, only returning first. Source: {}", this);
     }
     return paths.get(0);
   }
@@ -118,7 +117,7 @@ public class FileSourceImpl<T> implements Source<T> {
       try {
         size += SourceTargetHelper.getPathSize(configuration, path);
       } catch (IOException e) {
-        LOG.warn(String.format("Exception thrown looking up size of: %s", path), e);
+        LOG.warn("Exception thrown looking up size of: {}", path, e);
         throw new IllegalStateException("Failed to get the file size of:" + path, e);
       }
     }
@@ -154,7 +153,7 @@ public class FileSourceImpl<T> implements Source<T> {
           lastMod = lm;
         }
       } catch (IOException e) {
-        LOG.error("Could not determine last modification time for source: " + toString(), e);
+        LOG.error("Could not determine last modification time for source: {}", toString(), e);
       }
     }
     return lastMod;

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/io/impl/FileTargetImpl.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/io/impl/FileTargetImpl.java b/crunch-core/src/main/java/org/apache/crunch/io/impl/FileTargetImpl.java
index f3fd397..e87485d 100644
--- a/crunch-core/src/main/java/org/apache/crunch/io/impl/FileTargetImpl.java
+++ b/crunch-core/src/main/java/org/apache/crunch/io/impl/FileTargetImpl.java
@@ -24,8 +24,6 @@ import java.util.regex.Pattern;
 
 import com.google.common.collect.ImmutableMap;
 import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.CrunchRuntimeException;
 import org.apache.crunch.SourceTarget;
 import org.apache.crunch.Target;
@@ -44,10 +42,12 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class FileTargetImpl implements PathTarget {
 
-  private static final Log LOG = LogFactory.getLog(FileTargetImpl.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FileTargetImpl.class);
   
   protected final Path path;
   private final FormatBundle<? extends FileOutputFormat> formatBundle;
@@ -250,40 +250,40 @@ public class FileTargetImpl implements PathTarget {
         lastModForTarget = SourceTargetHelper.getLastModifiedAt(fs, path);
       }
     } catch (IOException e) {
-      LOG.error("Exception checking existence of path: " + path, e);
+      LOG.error("Exception checking existence of path: {}", path, e);
       throw new CrunchRuntimeException(e);
     }
     
     if (exists) {
       switch (strategy) {
       case DEFAULT:
-        LOG.error("Path " + path + " already exists!");
+        LOG.error("Path {} already exists!", path);
         throw new CrunchRuntimeException("Path already exists: " + path);
       case OVERWRITE:
-        LOG.info("Removing data at existing path: " + path);
+        LOG.info("Removing data at existing path: {}", path);
         try {
           fs.delete(path, true);
         } catch (IOException e) {
-          LOG.error("Exception thrown removing data at path: " + path, e);
+          LOG.error("Exception thrown removing data at path: {}", path, e);
         }
         break;
       case APPEND:
-        LOG.info("Adding output files to existing path: " + path);
+        LOG.info("Adding output files to existing path: {}", path);
         break;
       case CHECKPOINT:
         if (successful && lastModForTarget > lastModForSource) {
-          LOG.info("Re-starting pipeline from checkpoint path: " + path);
+          LOG.info("Re-starting pipeline from checkpoint path: {}", path);
           break;
         } else {
           if (!successful) {
-            LOG.info("_SUCCESS file not found, Removing data at existing checkpoint path: " + path);
+            LOG.info("_SUCCESS file not found, Removing data at existing checkpoint path: {}", path);
           } else {
-            LOG.info("Source data has recent updates. Removing data at existing checkpoint path: " + path);
+            LOG.info("Source data has recent updates. Removing data at existing checkpoint path: {}", path);
           }
           try {
             fs.delete(path, true);
           } catch (IOException e) {
-            LOG.error("Exception thrown removing data at checkpoint path: " + path, e);
+            LOG.error("Exception thrown removing data at checkpoint path: {}", path, e);
           }
           return false;
         }
@@ -291,7 +291,7 @@ public class FileTargetImpl implements PathTarget {
         throw new CrunchRuntimeException("Unknown WriteMode:  " + strategy);
       }
     } else {
-      LOG.info("Will write output files to new path: " + path);
+      LOG.info("Will write output files to new path: {}", path);
     }
     return exists;
   }

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/io/seq/SeqFileReaderFactory.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/io/seq/SeqFileReaderFactory.java b/crunch-core/src/main/java/org/apache/crunch/io/seq/SeqFileReaderFactory.java
index 3f45644..2829d87 100644
--- a/crunch-core/src/main/java/org/apache/crunch/io/seq/SeqFileReaderFactory.java
+++ b/crunch-core/src/main/java/org/apache/crunch/io/seq/SeqFileReaderFactory.java
@@ -20,10 +20,7 @@ package org.apache.crunch.io.seq;
 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.MapFn;
-import org.apache.crunch.fn.IdentityFn;
 import org.apache.crunch.io.FileReaderFactory;
 import org.apache.crunch.io.impl.AutoClosingIterator;
 import org.apache.crunch.types.Converter;
@@ -39,10 +36,12 @@ import org.apache.hadoop.util.ReflectionUtils;
 
 import com.google.common.collect.Iterators;
 import com.google.common.collect.UnmodifiableIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class SeqFileReaderFactory<T> implements FileReaderFactory<T> {
 
-  private static final Log LOG = LogFactory.getLog(SeqFileReaderFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SeqFileReaderFactory.class);
 
   private final Converter converter;
   private final MapFn<Object, T> mapFn;
@@ -89,7 +88,7 @@ public class SeqFileReaderFactory<T> implements FileReaderFactory<T> {
             nextChecked = true;
             return hasNext;
           } catch (IOException e) {
-            LOG.info("Error reading from path: " + path, e);
+            LOG.info("Error reading from path: {}", path, e);
             return false;
           }
         }
@@ -104,7 +103,7 @@ public class SeqFileReaderFactory<T> implements FileReaderFactory<T> {
         }
       });
     } catch (IOException e) {
-      LOG.info("Could not read seqfile at path: " + path, e);
+      LOG.info("Could not read seqfile at path: {}", path, e);
       return Iterators.emptyIterator();
     }
   }

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/io/text/TextFileReaderFactory.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/io/text/TextFileReaderFactory.java b/crunch-core/src/main/java/org/apache/crunch/io/text/TextFileReaderFactory.java
index 3077cb4..4770430 100644
--- a/crunch-core/src/main/java/org/apache/crunch/io/text/TextFileReaderFactory.java
+++ b/crunch-core/src/main/java/org/apache/crunch/io/text/TextFileReaderFactory.java
@@ -23,8 +23,6 @@ import java.io.InputStreamReader;
 import java.nio.charset.Charset;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.io.FileReaderFactory;
 import org.apache.crunch.io.impl.AutoClosingIterator;
 import org.apache.crunch.types.PType;
@@ -34,10 +32,12 @@ import org.apache.hadoop.fs.Path;
 
 import com.google.common.collect.Iterators;
 import com.google.common.collect.UnmodifiableIterator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TextFileReaderFactory<T> implements FileReaderFactory<T> {
 
-  private static final Log LOG = LogFactory.getLog(TextFileReaderFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TextFileReaderFactory.class);
 
   private final LineParser<T> parser;
 
@@ -57,7 +57,7 @@ public class TextFileReaderFactory<T> implements FileReaderFactory<T> {
     try {
       is = fs.open(path);
     } catch (IOException e) {
-      LOG.info("Could not read path: " + path, e);
+      LOG.info("Could not read path: {}", path, e);
       return Iterators.emptyIterator();
     }
 

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVFileReaderFactory.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVFileReaderFactory.java b/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVFileReaderFactory.java
index 8d28439..be7bdcc 100644
--- a/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVFileReaderFactory.java
+++ b/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVFileReaderFactory.java
@@ -20,21 +20,21 @@ package org.apache.crunch.io.text.csv;
 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.io.FileReaderFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
 import com.google.common.collect.Iterators;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The {@code FileReaderFactory} instance that is responsible for building a
  * {@code CSVRecordIterator}
  */
 public class CSVFileReaderFactory implements FileReaderFactory<String> {
-  private static final Log LOG = LogFactory.getLog(CSVFileReaderFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CSVFileReaderFactory.class);
   private final int bufferSize;
   private final String inputFileEncoding;
   private final char openQuoteChar;
@@ -94,7 +94,7 @@ public class CSVFileReaderFactory implements FileReaderFactory<String> {
       return new CSVRecordIterator(is, bufferSize, inputFileEncoding, openQuoteChar, closeQuoteChar, escapeChar,
           maximumRecordSize);
     } catch (final IOException e) {
-      LOG.info("Could not read path: " + path, e);
+      LOG.info("Could not read path: {}", path, e);
       return Iterators.emptyIterator();
     }
   }

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVFileSource.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVFileSource.java b/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVFileSource.java
index d0a7631..5d5e792 100644
--- a/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVFileSource.java
+++ b/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVFileSource.java
@@ -91,7 +91,7 @@ public class CSVFileSource extends FileSourceImpl<String> implements ReadableSou
   /**
    * Create a new CSVFileSource instance
    * 
-   * @param path
+   * @param paths
    *          The {@code Path} to the input data
    */
   public CSVFileSource(final List<Path> paths) {

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVReadableData.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVReadableData.java b/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVReadableData.java
index b266a08..c47579b 100644
--- a/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVReadableData.java
+++ b/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVReadableData.java
@@ -53,11 +53,11 @@ public class CSVReadableData extends ReadableDataImpl<String> {
    *          the size of the buffer to use while parsing through the input file
    * @param inputFileEncoding
    *          the encoding for the input file
-   * @param openQuote
+   * @param openQuoteChar
    *          the character to use to open quote blocks
-   * @param closeQuote
+   * @param closeQuoteChar
    *          the character to use to close quote blocks
-   * @param escape
+   * @param escapeChar
    *          the character to use for escaping control characters and quotes
    * @param maximumRecordSize
    *          The maximum acceptable size of one CSV record. Beyond this limit,

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVRecordIterator.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVRecordIterator.java b/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVRecordIterator.java
index 447a6d0..5645344 100644
--- a/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVRecordIterator.java
+++ b/crunch-core/src/main/java/org/apache/crunch/io/text/csv/CSVRecordIterator.java
@@ -57,11 +57,11 @@ public class CSVRecordIterator implements Iterator<String>, Closeable {
    *          The size of the buffer used when reading the input stream
    * @param inputFileEncoding
    *          the encoding for the input file
-   * @param openQuote
+   * @param openQuoteChar
    *          the character to use to open quote blocks
-   * @param closeQuote
+   * @param closeQuoteChar
    *          the character to use to close quote blocks
-   * @param escape
+   * @param escapeChar
    *          the character to use for escaping control characters and quotes
    * @param maximumRecordSize
    *          The maximum acceptable size of one CSV record. Beyond this limit,

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/lib/join/BloomFilterJoinStrategy.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/lib/join/BloomFilterJoinStrategy.java b/crunch-core/src/main/java/org/apache/crunch/lib/join/BloomFilterJoinStrategy.java
index 69fe27e..8fda17c 100644
--- a/crunch-core/src/main/java/org/apache/crunch/lib/join/BloomFilterJoinStrategy.java
+++ b/crunch-core/src/main/java/org/apache/crunch/lib/join/BloomFilterJoinStrategy.java
@@ -23,9 +23,6 @@ import java.io.IOException;
 import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.EncoderFactory;
-import org.apache.avro.reflect.ReflectDatumWriter;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.CrunchRuntimeException;
 import org.apache.crunch.DoFn;
 import org.apache.crunch.Emitter;
@@ -62,8 +59,6 @@ import org.apache.hadoop.util.hash.Hash;
  * potentially costly shuffle phase for data that would never be joined to the left side.
  */
 public class BloomFilterJoinStrategy<K, U, V> implements JoinStrategy<K, U, V> {
-  
-  private static final Log LOG = LogFactory.getLog(BloomFilterJoinStrategy.class);
 
   private int vectorSize;
   private int nbHash;

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/materialize/MaterializableIterable.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/materialize/MaterializableIterable.java b/crunch-core/src/main/java/org/apache/crunch/materialize/MaterializableIterable.java
index d00a4c1..f83117f 100644
--- a/crunch-core/src/main/java/org/apache/crunch/materialize/MaterializableIterable.java
+++ b/crunch-core/src/main/java/org/apache/crunch/materialize/MaterializableIterable.java
@@ -21,8 +21,6 @@ import java.io.IOException;
 import java.util.Iterator;
 
 import com.google.common.collect.Iterators;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.CrunchRuntimeException;
 import org.apache.crunch.Pipeline;
 import org.apache.crunch.PipelineResult;
@@ -31,6 +29,8 @@ import org.apache.crunch.io.PathTarget;
 import org.apache.crunch.io.ReadableSource;
 import org.apache.crunch.io.impl.FileSourceImpl;
 import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A reference to the materialized output of a {@code PCollection} created
@@ -38,7 +38,7 @@ import org.apache.hadoop.fs.Path;
  */
 public class MaterializableIterable<E> implements Iterable<E> {
 
-  private static final Log LOG = LogFactory.getLog(MaterializableIterable.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MaterializableIterable.class);
 
   private final Pipeline pipeline;
   private final ReadableSource<E> source;
@@ -106,7 +106,7 @@ public class MaterializableIterable<E> implements Iterable<E> {
     try {
       materialized = source.read(pipeline.getConfiguration());
     } catch (IOException e) {
-      LOG.error("Could not materialize: " + source, e);
+      LOG.error("Could not materialize: {}", source, e);
       throw new CrunchRuntimeException(e);
     }
   }

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/types/avro/Avros.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/types/avro/Avros.java b/crunch-core/src/main/java/org/apache/crunch/types/avro/Avros.java
index 3ba4dfb..cfdd6b7 100644
--- a/crunch-core/src/main/java/org/apache/crunch/types/avro/Avros.java
+++ b/crunch-core/src/main/java/org/apache/crunch/types/avro/Avros.java
@@ -45,8 +45,6 @@ import org.apache.avro.reflect.ReflectData;
 import org.apache.avro.specific.SpecificRecord;
 import org.apache.avro.util.Utf8;
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.MapFn;
 import org.apache.crunch.Pair;
 import org.apache.crunch.Tuple;
@@ -70,6 +68,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.TaskInputOutputContext;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Defines static methods that are analogous to the methods defined in
@@ -282,7 +282,7 @@ public class Avros {
   }
 
   private static class BytesToWritableMapFn<T extends Writable> extends MapFn<Object, T> {
-    private static final Log LOG = LogFactory.getLog(BytesToWritableMapFn.class);
+    private static final Logger LOG = LoggerFactory.getLogger(BytesToWritableMapFn.class);
 
     private final Class<T> writableClazz;
 
@@ -298,14 +298,14 @@ public class Avros {
         instance.readFields(new DataInputStream(new ByteArrayInputStream(byteBuffer.array(),
             byteBuffer.arrayOffset(), byteBuffer.limit())));
       } catch (IOException e) {
-        LOG.error("Exception thrown reading instance of: " + writableClazz, e);
+        LOG.error("Exception thrown reading instance of: {}", writableClazz, e);
       }
       return instance;
     }
   }
 
   private static class WritableToBytesMapFn<T extends Writable> extends MapFn<T, ByteBuffer> {
-    private static final Log LOG = LogFactory.getLog(WritableToBytesMapFn.class);
+    private static final Logger LOG = LoggerFactory.getLogger(WritableToBytesMapFn.class);
 
     @Override
     public ByteBuffer map(T input) {

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-core/src/main/java/org/apache/crunch/types/writable/Writables.java
----------------------------------------------------------------------
diff --git a/crunch-core/src/main/java/org/apache/crunch/types/writable/Writables.java b/crunch-core/src/main/java/org/apache/crunch/types/writable/Writables.java
index be2cb1a..5b5411b 100644
--- a/crunch-core/src/main/java/org/apache/crunch/types/writable/Writables.java
+++ b/crunch-core/src/main/java/org/apache/crunch/types/writable/Writables.java
@@ -33,8 +33,6 @@ import com.google.common.collect.BiMap;
 import com.google.common.collect.HashBiMap;
 import com.google.common.collect.ImmutableBiMap;
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.CrunchRuntimeException;
 import org.apache.crunch.MapFn;
 import org.apache.crunch.Pair;
@@ -67,6 +65,8 @@ import org.apache.hadoop.mapreduce.TaskInputOutputContext;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Defines static methods that are analogous to the methods defined in
@@ -75,7 +75,7 @@ import com.google.common.collect.Maps;
  */
 public class Writables {
 
-  private static final Log LOG = LogFactory.getLog(Writables.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Writables.class);
 
   static BiMap<Integer, Class<? extends Writable>> WRITABLE_CODES = HashBiMap.create(ImmutableBiMap.<Integer, Class<? extends Writable>>builder()
           .put(1, BytesWritable.class)
@@ -403,9 +403,8 @@ public class Writables {
         Class<Writable> clazz = ptype.getSerializationClass();
         if (WritableComparable.class.isAssignableFrom(clazz)) {
           if (!WRITABLE_CODES.inverse().containsKey(clazz)) {
-            LOG.warn(String.format(
-                "WritableComparable class %s in tuple type should be registered with Writables.registerComparable",
-                clazz.toString()));
+            LOG.warn("WritableComparable class {} in tuple type should be registered with Writables.registerComparable",
+                clazz.toString());
           }
         }
         writableClasses.add(clazz);

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-examples/pom.xml
----------------------------------------------------------------------
diff --git a/crunch-examples/pom.xml b/crunch-examples/pom.xml
index a76ca29..02980fd 100644
--- a/crunch-examples/pom.xml
+++ b/crunch-examples/pom.xml
@@ -50,16 +50,10 @@ under the License.
       <scope>provided</scope>
     </dependency>
 
-    <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-      <scope>provided</scope>
-    </dependency>
 
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-      <scope>provided</scope>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
     </dependency>
 
     <!-- Required for running LocalJobRunner from the IDE, but missing

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-examples/src/main/java/org/apache/crunch/examples/WordAggregationHBase.java
----------------------------------------------------------------------
diff --git a/crunch-examples/src/main/java/org/apache/crunch/examples/WordAggregationHBase.java b/crunch-examples/src/main/java/org/apache/crunch/examples/WordAggregationHBase.java
index fc95359..b2d24f8 100644
--- a/crunch-examples/src/main/java/org/apache/crunch/examples/WordAggregationHBase.java
+++ b/crunch-examples/src/main/java/org/apache/crunch/examples/WordAggregationHBase.java
@@ -23,8 +23,6 @@ import java.util.Arrays;
 import java.util.List;
 
 import com.google.common.collect.Lists;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.DoFn;
 import org.apache.crunch.Emitter;
 import org.apache.crunch.PCollection;
@@ -53,6 +51,8 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 /**
@@ -63,7 +63,7 @@ import org.apache.hadoop.util.ToolRunner;
  */
 @SuppressWarnings("serial")
 public class WordAggregationHBase extends Configured implements Tool, Serializable {
-  private static final Log LOG = LogFactory.getLog(WordAggregationHBase.class);
+  private static final Logger LOG = LoggerFactory.getLogger(WordAggregationHBase.class);
 
   // Configuration parameters. Here configured for a hbase instance running
   // locally

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-hbase/pom.xml
----------------------------------------------------------------------
diff --git a/crunch-hbase/pom.xml b/crunch-hbase/pom.xml
index c927fb7..0d0acd1 100644
--- a/crunch-hbase/pom.xml
+++ b/crunch-hbase/pom.xml
@@ -40,8 +40,8 @@ under the License.
     </dependency>
 
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
     </dependency>
 
     <dependency>
@@ -106,12 +106,6 @@ under the License.
 
     <dependency>
       <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.slf4j</groupId>
       <artifactId>slf4j-log4j12</artifactId>
       <scope>test</scope>
     </dependency>

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HBaseSourceTarget.java
----------------------------------------------------------------------
diff --git a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HBaseSourceTarget.java b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HBaseSourceTarget.java
index c3ecfd7..a957898 100644
--- a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HBaseSourceTarget.java
+++ b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HBaseSourceTarget.java
@@ -20,8 +20,6 @@ package org.apache.crunch.io.hbase;
 import java.io.IOException;
 
 import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.Pair;
 import org.apache.crunch.ReadableData;
 import org.apache.crunch.Source;
@@ -42,11 +40,8 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.mapreduce.KeyValueSerialization;
 import org.apache.hadoop.hbase.mapreduce.MultiTableInputFormat;
-import org.apache.hadoop.hbase.mapreduce.MutationSerialization;
 import org.apache.hadoop.hbase.mapreduce.ResultSerialization;
-import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
@@ -56,12 +51,14 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.collect.ObjectArrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class HBaseSourceTarget extends HBaseTarget implements
     ReadableSourceTarget<Pair<ImmutableBytesWritable, Result>>,
     TableSource<ImmutableBytesWritable, Result> {
 
-  private static final Log LOG = LogFactory.getLog(HBaseSourceTarget.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HBaseSourceTarget.class);
   
   private static final PTableType<ImmutableBytesWritable, Result> PTYPE = Writables.tableOf(
       Writables.writables(ImmutableBytesWritable.class), HBaseTypes.results());
@@ -174,7 +171,7 @@ public class HBaseSourceTarget extends HBaseTarget implements
 
   @Override
   public long getLastModifiedAt(Configuration configuration) {
-    LOG.warn("Cannot determine last modified time for source: " + toString());
+    LOG.warn("Cannot determine last modified time for source: {}", toString());
     return -1;
   }
 

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HBaseTarget.java
----------------------------------------------------------------------
diff --git a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HBaseTarget.java b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HBaseTarget.java
index 60ff746..1119fae 100644
--- a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HBaseTarget.java
+++ b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HBaseTarget.java
@@ -22,8 +22,6 @@ import java.util.Map;
 
 import com.google.common.collect.Maps;
 import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.CrunchRuntimeException;
 import org.apache.crunch.SourceTarget;
 import org.apache.crunch.Target;
@@ -44,10 +42,12 @@ import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class HBaseTarget implements MapReduceTarget {
 
-  private static final Log LOG = LogFactory.getLog(HBaseTarget.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HBaseTarget.class);
   
   protected String table;
   private Map<String, String> extraConf = Maps.newHashMap();

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileInputFormat.java
----------------------------------------------------------------------
diff --git a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileInputFormat.java b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileInputFormat.java
index 43005ab..ca886f6 100644
--- a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileInputFormat.java
+++ b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileInputFormat.java
@@ -19,22 +19,15 @@ package org.apache.crunch.io.hbase;
 
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.fs.HFileSystem;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.io.hfile.FixedFileTrailer;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
-import org.apache.hadoop.hbase.io.hfile.HFileReaderV2;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.NullWritable;
@@ -44,6 +37,8 @@ import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -54,7 +49,7 @@ import java.util.List;
  */
 public class HFileInputFormat extends FileInputFormat<NullWritable, KeyValue> {
 
-  private static final Log LOG = LogFactory.getLog(HFileInputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HFileInputFormat.class);
   static final String START_ROW_KEY = "crunch.hbase.hfile.input.format.start.row";
   static final String STOP_ROW_KEY = "crunch.hbase.hfile.input.format.stop.row";
 
@@ -95,7 +90,7 @@ public class HFileInputFormat extends FileInputFormat<NullWritable, KeyValue> {
       conf = context.getConfiguration();
       Path path = fileSplit.getPath();
       FileSystem fs = path.getFileSystem(conf);
-      LOG.info("Initialize HFileRecordReader for " + path);
+      LOG.info("Initialize HFileRecordReader for {}", path);
       this.in = HFile.createReader(fs, path, new CacheConfig(conf), conf);
 
       // The file info must be loaded before the scanner can be used.
@@ -129,7 +124,9 @@ public class HFileInputFormat extends FileInputFormat<NullWritable, KeyValue> {
       boolean hasNext;
       if (!seeked) {
         if (startRow != null) {
-          LOG.info("Seeking to start row " + Bytes.toStringBinary(startRow));
+          if(LOG.isInfoEnabled()) {
+            LOG.info("Seeking to start row {}", Bytes.toStringBinary(startRow));
+          }
           KeyValue kv = KeyValue.createFirstOnRow(startRow);
           hasNext = seekAtOrAfter(scanner, kv);
         } else {
@@ -147,7 +144,9 @@ public class HFileInputFormat extends FileInputFormat<NullWritable, KeyValue> {
       if (stopRow != null && Bytes.compareTo(
           value.getBuffer(), value.getRowOffset(), value.getRowLength(),
           stopRow, 0, stopRow.length) >= 0) {
-        LOG.info("Reached stop row " + Bytes.toStringBinary(stopRow));
+        if(LOG.isInfoEnabled()) {
+          LOG.info("Reached stop row {}", Bytes.toStringBinary(stopRow));
+        }
         reachedStopRow = true;
         value = null;
         return false;

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileOutputFormatForCrunch.java
----------------------------------------------------------------------
diff --git a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileOutputFormatForCrunch.java b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileOutputFormatForCrunch.java
index ad68da4..0c64e5e 100644
--- a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileOutputFormatForCrunch.java
+++ b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileOutputFormatForCrunch.java
@@ -21,8 +21,6 @@ package org.apache.crunch.io.hbase;
 
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -31,8 +29,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
-import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
-import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -40,6 +36,8 @@ import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
@@ -59,7 +57,7 @@ public class HFileOutputFormatForCrunch extends FileOutputFormat<Object, KeyValu
 
   public static final String HCOLUMN_DESCRIPTOR_KEY = "hbase.hfileoutputformat.column.descriptor";
   private static final String COMPACTION_EXCLUDE_CONF_KEY = "hbase.mapreduce.hfileoutputformat.compaction.exclude";
-  private static final Log LOG = LogFactory.getLog(HFileOutputFormatForCrunch.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HFileOutputFormatForCrunch.class);
 
   private final byte [] now = Bytes.toBytes(System.currentTimeMillis());
   private final TimeRangeTracker trt = new TimeRangeTracker();
@@ -86,8 +84,8 @@ public class HFileOutputFormatForCrunch extends FileOutputFormat<Object, KeyValu
     }
     HColumnDescriptor hcol = new HColumnDescriptor();
     hcol.readFields(new DataInputStream(new ByteArrayInputStream(hcolBytes)));
-    LOG.info("Output path: " + outputPath);
-    LOG.info("HColumnDescriptor: " + hcol.toString());
+    LOG.info("Output path: {}", outputPath);
+    LOG.info("HColumnDescriptor: {}", hcol.toString());
     final HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
         .withPath(fs, outputPath)
         .withComparator(KeyValue.COMPARATOR)

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileSource.java
----------------------------------------------------------------------
diff --git a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileSource.java b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileSource.java
index 47abe9a..bd3cc8f 100644
--- a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileSource.java
+++ b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileSource.java
@@ -20,8 +20,6 @@ package org.apache.crunch.io.hbase;
 import com.google.common.base.Objects;
 import com.google.common.collect.ImmutableList;
 import org.apache.commons.codec.binary.Hex;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.impl.mr.run.RuntimeParameters;
 import org.apache.crunch.io.FormatBundle;
 import org.apache.crunch.io.ReadableSource;
@@ -42,13 +40,15 @@ import org.apache.hadoop.hbase.mapreduce.MutationSerialization;
 import org.apache.hadoop.hbase.mapreduce.ResultSerialization;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.mapreduce.Job;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.List;
 
 public class HFileSource extends FileSourceImpl<KeyValue> implements ReadableSource<KeyValue> {
 
-  private static final Log LOG = LogFactory.getLog(HFileSource.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HFileSource.class);
   private static final PType<KeyValue> KEY_VALUE_PTYPE = HBaseTypes.keyValues();
 
   public HFileSource(Path path) {
@@ -122,9 +122,9 @@ public class HFileSource extends FileSourceImpl<KeyValue> implements ReadableSou
       try {
         sum += getSizeInternal(conf, path);
       } catch (IOException e) {
-        LOG.warn("Failed to estimate size of " + path);
+        LOG.warn("Failed to estimate size of {}", path);
       }
-      System.out.println("Size after read of path = " + path.toString() + " = " + sum);
+      LOG.info("Size after read of path = {} = {}", path.toString(), sum);
     }
     return sum;
   }

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileUtils.java
----------------------------------------------------------------------
diff --git a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileUtils.java b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileUtils.java
index 63d2286..252bad7 100644
--- a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileUtils.java
+++ b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HFileUtils.java
@@ -36,8 +36,6 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Lists;
 import com.google.common.primitives.Longs;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.DoFn;
 import org.apache.crunch.Emitter;
 import org.apache.crunch.FilterFn;
@@ -64,10 +62,12 @@ import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.SequenceFile;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public final class HFileUtils {
 
-  private static final Log LOG = LogFactory.getLog(HFileUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HFileUtils.class);
 
   /** Compares {@code KeyValue} by its family, qualifier, timestamp (reversely), type (reversely) and memstoreTS. */
   private static final Comparator<KeyValue> KEY_VALUE_COMPARATOR = new Comparator<KeyValue>() {
@@ -365,7 +365,7 @@ public final class HFileUtils {
       Path outputPath) throws IOException {
     HColumnDescriptor[] families = table.getTableDescriptor().getColumnFamilies();
     if (families.length == 0) {
-      LOG.warn(table + "has no column families");
+      LOG.warn("{} has no column families", table);
       return;
     }
     for (HColumnDescriptor f : families) {
@@ -430,7 +430,7 @@ public final class HFileUtils {
       Configuration conf,
       Path path,
       List<KeyValue> splitPoints) throws IOException {
-    LOG.info("Writing " + splitPoints.size() + " split points to " + path);
+    LOG.info("Writing {} split points to {}", splitPoints.size(), path);
     SequenceFile.Writer writer = SequenceFile.createWriter(
         path.getFileSystem(conf),
         conf,

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HTableIterator.java
----------------------------------------------------------------------
diff --git a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HTableIterator.java b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HTableIterator.java
index 22057bb..3db5897 100644
--- a/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HTableIterator.java
+++ b/crunch-hbase/src/main/java/org/apache/crunch/io/hbase/HTableIterator.java
@@ -19,21 +19,21 @@
  */
 package org.apache.crunch.io.hbase;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.crunch.Pair;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
 
 class HTableIterator implements Iterator<Pair<ImmutableBytesWritable, Result>> {
-  private static final Log LOG = LogFactory.getLog(HTableIterator.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HTableIterator.class);
 
   private final HTable table;
   private final Iterator<Scan> scans;
@@ -68,7 +68,7 @@ class HTableIterator implements Iterator<Pair<ImmutableBytesWritable, Result>> {
         try {
           table.close();
         } catch (IOException e) {
-          LOG.error("Exception closing HTable: " + table.getName(), e);
+          LOG.error("Exception closing HTable: {}", table.getName(), e);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-hive/src/main/java/org/apache/crunch/types/orc/OrcUtils.java
----------------------------------------------------------------------
diff --git a/crunch-hive/src/main/java/org/apache/crunch/types/orc/OrcUtils.java b/crunch-hive/src/main/java/org/apache/crunch/types/orc/OrcUtils.java
index 8d9c806..a0b4a0e 100644
--- a/crunch-hive/src/main/java/org/apache/crunch/types/orc/OrcUtils.java
+++ b/crunch-hive/src/main/java/org/apache/crunch/types/orc/OrcUtils.java
@@ -77,7 +77,7 @@ public class OrcUtils {
   /**
    * Create an object of OrcStruct given a type string and a list of objects
    * 
-   * @param typeStr
+   * @param typeInfo
    * @param objs
    * @return
    */

http://git-wip-us.apache.org/repos/asf/crunch/blob/82cecc0c/crunch-spark/pom.xml
----------------------------------------------------------------------
diff --git a/crunch-spark/pom.xml b/crunch-spark/pom.xml
index 1060a84..6ab5db5 100644
--- a/crunch-spark/pom.xml
+++ b/crunch-spark/pom.xml
@@ -60,7 +60,12 @@ under the License.
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
       <artifactId>slf4j-log4j12</artifactId>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.crunch</groupId>


Mime
View raw message