parquet-commits mailing list archives

From jul...@apache.org
Subject [2/4] parquet-mr git commit: PARQUET-423: Replace old Log class with SLF4J Logging
Date Wed, 26 Oct 2016 16:09:56 GMT
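
Every hunk in this commit applies the same mechanical pattern: org.apache.parquet.Log is replaced by org.slf4j.Logger, string concatenation inside log calls becomes {} placeholders, and the Log.DEBUG / Log.INFO constant guards become LOG.isDebugEnabled() / LOG.isInfoEnabled() checks, or are dropped where the placeholder form makes them redundant. A minimal before/after sketch of the pattern (ExampleReader is a hypothetical class, not part of the commit):

    // Before: parquet-mr's custom logger, eager concatenation, constant guard
    //   private static final Log LOG = Log.getLog(ExampleReader.class);
    //   if (Log.DEBUG) LOG.debug("read " + count + " records");

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ExampleReader {
      private static final Logger LOG = LoggerFactory.getLogger(ExampleReader.class);

      void report(int count) {
        // The message is only formatted when DEBUG is enabled, so cheap
        // arguments no longer need an explicit guard.
        LOG.debug("read {} records", count);
      }
    }
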
http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetInputFormat.java
----------------------------------------------------------------------
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetInputFormat.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetInputFormat.java
index 1fe57f9..7c5b5be 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetInputFormat.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetInputFormat.java
@@ -51,7 +51,6 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-import org.apache.parquet.Log;
 import org.apache.parquet.Preconditions;
 import org.apache.parquet.filter.UnboundRecordFilter;
 import org.apache.parquet.filter2.compat.FilterCompat;
@@ -72,6 +71,8 @@ import org.apache.parquet.hadoop.util.SerializationUtil;
 import org.apache.parquet.io.ParquetDecodingException;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.schema.MessageTypeParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The input format to read a Parquet file.
@@ -93,7 +94,7 @@ import org.apache.parquet.schema.MessageTypeParser;
  */
 public class ParquetInputFormat<T> extends FileInputFormat<Void, T> {
 
-  private static final Log LOG = Log.getLog(ParquetInputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ParquetInputFormat.class);
 
   /**
    * key to configure the ReadSupport implementation
@@ -382,7 +383,7 @@ public class ParquetInputFormat<T> extends FileInputFormat<Void, T> {
         result.add(file);
       }
     }
-    LOG.info("Total input paths to process : " + result.size());
+    LOG.info("Total input paths to process : {}", result.size());
     return result;
   }
 
@@ -424,7 +425,7 @@ public class ParquetInputFormat<T> extends FileInputFormat<Void, T> {
       FileStatusWrapper statusWrapper = new FileStatusWrapper(status);
       FootersCacheValue cacheEntry =
               footersCache.getCurrentValue(statusWrapper);
-      if (Log.DEBUG) {
+      if (LOG.isDebugEnabled()) {
         LOG.debug("Cache entry " + (cacheEntry == null ? "not " : "")
                 + " found for '" + status.getPath() + "'");
       }
@@ -436,10 +437,8 @@ public class ParquetInputFormat<T> extends FileInputFormat<Void, T> {
         missingStatusesMap.put(status.getPath(), statusWrapper);
       }
     }
-    if (Log.DEBUG) {
-      LOG.debug("found " + footersMap.size() + " footers in cache and adding up "
-              + "to " + missingStatuses.size() + " missing footers to the cache");
-    }
+    LOG.debug("found {} footers in cache and adding up to {} missing footers to the cache",
+            footersMap.size(), missingStatuses.size());
 
     if (!missingStatuses.isEmpty()) {
       List<Footer> newFooters = getFooters(config, missingStatuses);
@@ -480,7 +479,7 @@ public class ParquetInputFormat<T> extends FileInputFormat<Void, T> {
    * @throws IOException
    */
   public List<Footer> getFooters(Configuration configuration, Collection<FileStatus> statuses) throws IOException {
-    if (Log.DEBUG) LOG.debug("reading " + statuses.size() + " files");
+    LOG.debug("reading {} files", statuses.size());
     boolean taskSideMetaData = isTaskSideMetaData(configuration);
     return ParquetFileReader.readAllFootersInParallelUsingSummaryFiles(configuration, statuses, taskSideMetaData);
   }
@@ -513,10 +512,9 @@ public class ParquetInputFormat<T> extends FileInputFormat<Void, T> {
     public boolean isCurrent(FileStatusWrapper key) {
       long currentModTime = key.getModificationTime();
       boolean isCurrent = modificationTime >= currentModTime;
-      if (Log.DEBUG && !isCurrent) {
-        LOG.debug("The cache value for '" + key + "' is not current: "
-                + "cached modification time=" + modificationTime + ", "
-                + "current modification time: " + currentModTime);
+      if (LOG.isDebugEnabled() && !isCurrent) {
+        LOG.debug("The cache value for '{}' is not current: cached modification time={}, current modification time: {}",
+                key, modificationTime, currentModTime);
       }
       return isCurrent;
     }
@@ -689,7 +687,7 @@ class ClientSideMetadataSplitStrategy {
     }
   }
 
-  private static final Log LOG = Log.getLog(ClientSideMetadataSplitStrategy.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ClientSideMetadataSplitStrategy.class);
 
   List<ParquetInputSplit> getSplits(Configuration configuration, List<Footer> footers,
       long maxSplitSize, long minSplitSize, ReadContext readContext)
@@ -702,7 +700,7 @@ class ClientSideMetadataSplitStrategy {
 
     for (Footer footer : footers) {
       final Path file = footer.getFile();
-      LOG.debug(file);
+      LOG.debug("{}", file);
       FileSystem fs = file.getFileSystem(configuration);
       FileStatus fileStatus = fs.getFileStatus(file);
       ParquetMetadata parquetMetaData = footer.getParquetMetadata();
@@ -733,7 +731,7 @@ class ClientSideMetadataSplitStrategy {
 
     if (rowGroupsDropped > 0 && totalRowGroups > 0) {
       int percentDropped = (int) ((((double) rowGroupsDropped) / totalRowGroups) * 100);
-      LOG.info("Dropping " + rowGroupsDropped + " row groups that do not pass filter predicate! (" + percentDropped + "%)");
+      LOG.info("Dropping {} row groups that do not pass filter predicate! ({}%)", rowGroupsDropped, percentDropped);
     } else {
       LOG.info("There were no row groups that could be dropped due to filter predicates");
     }

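One detail of the ParquetInputFormat hunks above: SLF4J's Logger has no debug(Object) overload, so the old LOG.debug(file) becomes LOG.debug("{}", file). The wrapper is not just a type fix; it also defers file.toString() until DEBUG is actually enabled. A sketch of the difference (PlaceholderDemo is illustrative only):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class PlaceholderDemo {
      private static final Logger LOG = LoggerFactory.getLogger(PlaceholderDemo.class);

      static void log(Object file) {
        // Eager: the concatenation (and file.toString()) runs even when DEBUG is off.
        LOG.debug("processing " + file);
        // Lazy: toString() is invoked only if DEBUG is enabled.
        LOG.debug("processing {}", file);
      }
    }
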
http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputCommitter.java
----------------------------------------------------------------------
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputCommitter.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputCommitter.java
index 45455ef..facb978 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputCommitter.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputCommitter.java
@@ -29,12 +29,13 @@ import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.hadoop.ParquetOutputFormat.JobSummaryLevel;
 import org.apache.parquet.hadoop.util.ContextUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class ParquetOutputCommitter extends FileOutputCommitter {
-  private static final Log LOG = Log.getLog(ParquetOutputCommitter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ParquetOutputCommitter.class);
 
   private final Path outputPath;
 

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputFormat.java
----------------------------------------------------------------------
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputFormat.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputFormat.java
index d05d41f..bd20360 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputFormat.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputFormat.java
@@ -18,7 +18,6 @@
  */
 package org.apache.parquet.hadoop;
 
-import static org.apache.parquet.Log.INFO;
 import static org.apache.parquet.Preconditions.checkNotNull;
 import static org.apache.parquet.hadoop.ParquetWriter.DEFAULT_BLOCK_SIZE;
 import static org.apache.parquet.hadoop.util.ContextUtil.getConfiguration;
@@ -35,7 +34,6 @@ import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.column.ParquetProperties;
 import org.apache.parquet.column.ParquetProperties.WriterVersion;
 import org.apache.parquet.hadoop.ParquetFileWriter.Mode;
@@ -44,6 +42,8 @@ import org.apache.parquet.hadoop.api.WriteSupport.WriteContext;
 import org.apache.parquet.hadoop.codec.CodecConfig;
 import org.apache.parquet.hadoop.metadata.CompressionCodecName;
 import org.apache.parquet.hadoop.util.ConfigurationUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * OutputFormat to write to a Parquet file
@@ -101,7 +101,7 @@ import org.apache.parquet.hadoop.util.ConfigurationUtil;
  * @param <T> the type of the materialized records
  */
 public class ParquetOutputFormat<T> extends FileOutputFormat<Void, T> {
-  private static final Log LOG = Log.getLog(ParquetOutputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ParquetOutputFormat.class);
 
   public static enum JobSummaryLevel {
     /**
@@ -373,17 +373,17 @@ public class ParquetOutputFormat<T> extends FileOutputFormat<Void, T> {
     int maxPaddingSize = getMaxPaddingSize(conf);
     boolean validating = getValidation(conf);
 
-    if (INFO) {
-      LOG.info("Parquet block size to " + blockSize);
-      LOG.info("Parquet page size to " + props.getPageSizeThreshold());
-      LOG.info("Parquet dictionary page size to " + props.getDictionaryPageSizeThreshold());
-      LOG.info("Dictionary is " + (props.isEnableDictionary() ? "on" : "off"));
-      LOG.info("Validation is " + (validating ? "on" : "off"));
-      LOG.info("Writer version is: " + props.getWriterVersion());
-      LOG.info("Maximum row group padding size is " + maxPaddingSize + " bytes");
-      LOG.info("Page size checking is: " + (props.estimateNextSizeCheck() ? "estimated" : "constant"));
-      LOG.info("Min row count for page size check is: " + props.getMinRowCountForPageSizeCheck());
-      LOG.info("Max row count for page size check is: " + props.getMaxRowCountForPageSizeCheck());
+    if (LOG.isInfoEnabled()) {
+      LOG.info("Parquet block size to {}", blockSize);
+      LOG.info("Parquet page size to {}", props.getPageSizeThreshold());
+      LOG.info("Parquet dictionary page size to {}", props.getDictionaryPageSizeThreshold());
+      LOG.info("Dictionary is {}", (props.isEnableDictionary() ? "on" : "off"));
+      LOG.info("Validation is {}", (validating ? "on" : "off"));
+      LOG.info("Writer version is: {}", props.getWriterVersion());
+      LOG.info("Maximum row group padding size is {} bytes", maxPaddingSize);
+      LOG.info("Page size checking is: {}", (props.estimateNextSizeCheck() ? "estimated" : "constant"));
+      LOG.info("Min row count for page size check is: {}", props.getMinRowCountForPageSizeCheck());
+      LOG.info("Max row count for page size check is: {}", props.getMaxRowCountForPageSizeCheck());
     }
 
     WriteContext init = writeSupport.init(conf);

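The isInfoEnabled() guard above survives the conversion even though every message now uses placeholders: placeholders defer string formatting, but the argument expressions themselves are still evaluated at the call site, and the guard skips the whole block of ten calls at once when INFO is off. A sketch of the idiom (the props map is hypothetical):

    import java.util.Map;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class GuardDemo {
      private static final Logger LOG = LoggerFactory.getLogger(GuardDemo.class);

      static void dump(Map<String, Object> props) {
        // Placeholders defer formatting, but each argument is still computed.
        // One level check skips the entire block when INFO is disabled.
        if (LOG.isInfoEnabled()) {
          for (Map.Entry<String, Object> e : props.entrySet()) {
            LOG.info("{} = {}", e.getKey(), e.getValue());
          }
        }
      }
    }
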
http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetRecordReader.java
----------------------------------------------------------------------
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetRecordReader.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetRecordReader.java
index eae3b4e..f2f656d 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetRecordReader.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetRecordReader.java
@@ -41,7 +41,6 @@ import org.apache.hadoop.mapreduce.TaskInputOutputContext;
 
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.parquet.CorruptDeltaByteArrays;
-import org.apache.parquet.Log;
 import org.apache.parquet.column.Encoding;
 import org.apache.parquet.filter.UnboundRecordFilter;
 import org.apache.parquet.filter2.compat.FilterCompat;
@@ -55,6 +54,8 @@ import org.apache.parquet.hadoop.metadata.FileMetaData;
 import org.apache.parquet.hadoop.util.ContextUtil;
 import org.apache.parquet.hadoop.util.counters.BenchmarkCounter;
 import org.apache.parquet.io.ParquetDecodingException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Reads the records from a block of a Parquet file
@@ -67,7 +68,7 @@ import org.apache.parquet.io.ParquetDecodingException;
  */
 public class ParquetRecordReader<T> extends RecordReader<Void, T> {
 
-  private static final Log LOG = Log.getLog(ParquetRecordReader.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ParquetRecordReader.class);
   private final InternalParquetRecordReader<T> internalReader;
 
   /**

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/UnmaterializableRecordCounter.java
----------------------------------------------------------------------
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/UnmaterializableRecordCounter.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/UnmaterializableRecordCounter.java
index c4de8f3..4696319 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/UnmaterializableRecordCounter.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/UnmaterializableRecordCounter.java
@@ -20,9 +20,10 @@ package org.apache.parquet.hadoop;
 
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.parquet.Log;
 import org.apache.parquet.io.ParquetDecodingException;
 import org.apache.parquet.io.api.RecordMaterializer.RecordMaterializationException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // Essentially taken from:
 // https://github.com/twitter/elephant-bird/blob/master/core/src/main/java/com/twitter/elephantbird/mapreduce/input/LzoRecordReader.java#L124
@@ -43,7 +44,7 @@ public class UnmaterializableRecordCounter {
   /* Tolerated percent bad records */
   public static final String BAD_RECORD_THRESHOLD_CONF_KEY = "parquet.read.bad.record.threshold";
 
-  private static final Log LOG = Log.getLog(UnmaterializableRecordCounter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(UnmaterializableRecordCounter.class);
 
   private static final float DEFAULT_THRESHOLD =  0f;
 

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/CodecConfig.java
----------------------------------------------------------------------
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/CodecConfig.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/CodecConfig.java
index 9657865..e0907f9 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/CodecConfig.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/CodecConfig.java
@@ -22,13 +22,12 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.parquet.Log;
 import org.apache.parquet.hadoop.ParquetOutputFormat;
 import org.apache.parquet.hadoop.metadata.CompressionCodecName;
 import org.apache.parquet.hadoop.util.ContextUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import static org.apache.parquet.Log.INFO;
-import static org.apache.parquet.Log.WARN;
 import static org.apache.parquet.hadoop.metadata.CompressionCodecName.UNCOMPRESSED;
 
 /**
@@ -39,7 +38,7 @@ import static org.apache.parquet.hadoop.metadata.CompressionCodecName.UNCOMPRESS
  * @author Tianshuo Deng
  */
 public abstract class CodecConfig {
-  private static final Log LOG = Log.getLog(CodecConfig.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CodecConfig.class);
 
   /**
    * @return if a compress flag is set from hadoop
@@ -89,11 +88,11 @@ public abstract class CodecConfig {
     } else if (isHadoopCompressionSet()) { // from hadoop config
       codec = getHadoopCompressionCodec();
     } else {
-      if (INFO) LOG.info("Compression set to false");
+      LOG.info("Compression set to false");
       codec = CompressionCodecName.UNCOMPRESSED;
     }
 
-    if (INFO) LOG.info("Compression: " + codec.name());
+    LOG.info("Compression: {}", codec.name());
     return codec;
   }
 
@@ -102,14 +101,13 @@ public abstract class CodecConfig {
     try {
       // find the right codec
       Class<?> codecClass = getHadoopOutputCompressorClass(CompressionCodecName.UNCOMPRESSED.getHadoopCompressionCodecClass());
-      if (INFO) LOG.info("Compression set through hadoop codec: " + codecClass.getName());
+      LOG.info("Compression set through hadoop codec: {}", codecClass.getName());
       codec = CompressionCodecName.fromCompressionCodec(codecClass);
     } catch (CompressionCodecNotSupportedException e) {
-      if (WARN)
-        LOG.warn("codec defined in hadoop config is not supported by parquet [" + e.getCodecClass().getName() + "] and will use UNCOMPRESSED", e);
+      LOG.warn("codec defined in hadoop config is not supported by parquet [{}] and will use UNCOMPRESSED", e.getCodecClass().getName(), e);
       codec = CompressionCodecName.UNCOMPRESSED;
     } catch (IllegalArgumentException e) {
-      if (WARN) LOG.warn("codec class not found: " + e.getMessage(), e);
+      LOG.warn("codec class not found: {}", e.getMessage(), e);
       codec = CompressionCodecName.UNCOMPRESSED;
     }
     return codec;

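The warn calls above pass the exception as a final argument after the placeholder arguments. Since SLF4J 1.6.0, a trailing Throwable that has no matching {} is treated as the exception to log (stack trace included) rather than formatted into the message, which is what these hunks rely on. A sketch (ThrowableDemo is illustrative only):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ThrowableDemo {
      private static final Logger LOG = LoggerFactory.getLogger(ThrowableDemo.class);

      static void warn(Exception e) {
        // "SNAPPY" fills the {}; e has no placeholder, so it is logged as the
        // exception rather than stringified into the message.
        LOG.warn("codec {} is not supported, falling back to UNCOMPRESSED", "SNAPPY", e);
      }
    }
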
http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/HadoopStreams.java
----------------------------------------------------------------------
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/HadoopStreams.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/HadoopStreams.java
index 7c321cd..8731bd6 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/HadoopStreams.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/HadoopStreams.java
@@ -20,9 +20,11 @@
 package org.apache.parquet.hadoop.util;
 
 import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.parquet.Log;
 import org.apache.parquet.io.ParquetDecodingException;
 import org.apache.parquet.io.SeekableInputStream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 
@@ -31,7 +33,7 @@ import java.lang.reflect.InvocationTargetException;
  */
 public class HadoopStreams {
 
-  private static final Log LOG = Log.getLog(HadoopStreams.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HadoopStreams.class);
 
   private static final Class<?> byteBufferReadableClass = getReadableClass();
   static final Constructor<SeekableInputStream> h2SeekableConstructor = getH2SeekableConstructor();

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/SerializationUtil.java
----------------------------------------------------------------------
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/SerializationUtil.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/SerializationUtil.java
index ec413ac..ffbe2a7 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/SerializationUtil.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/SerializationUtil.java
@@ -30,7 +30,8 @@ import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.conf.Configuration;
 
 import org.apache.parquet.Closeables;
-import org.apache.parquet.Log;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Serialization utils copied from:
@@ -39,7 +40,7 @@ import org.apache.parquet.Log;
  * TODO: Refactor elephant-bird so that we can depend on utils like this without extra baggage.
  */
 public final class SerializationUtil {
-  private static final Log LOG = Log.getLog(SerializationUtil.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SerializationUtil.class);
 
   private SerializationUtil() { }
 

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetFileWriter.java
----------------------------------------------------------------------
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetFileWriter.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetFileWriter.java
index c56515f..00e0c68 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetFileWriter.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetFileWriter.java
@@ -30,7 +30,6 @@ import org.apache.parquet.hadoop.ParquetOutputFormat.JobSummaryLevel;
 import org.junit.Assume;
 import org.junit.Rule;
 import org.junit.Test;
-import org.apache.parquet.Log;
 import org.apache.parquet.bytes.BytesInput;
 import org.apache.parquet.column.ColumnDescriptor;
 import org.apache.parquet.column.Encoding;
@@ -68,10 +67,12 @@ import org.apache.parquet.example.data.simple.SimpleGroup;
 import org.apache.parquet.hadoop.example.GroupWriteSupport;
 import org.junit.rules.TemporaryFolder;
 import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestParquetFileWriter {
 
-  private static final Log LOG = Log.getLog(TestParquetFileWriter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestParquetFileWriter.class);
 
   private static final MessageType SCHEMA = MessageTypeParser.parseMessageType("" +
       "message m {" +
@@ -608,7 +609,7 @@ public class TestParquetFileWriter {
   }
 
   private void validateFooters(final List<Footer> metadata) {
-    LOG.debug(metadata);
+    LOG.debug("{}", metadata);
     assertEquals(String.valueOf(metadata), 3, metadata.size());
     for (Footer footer : metadata) {
       final File file = new File(footer.getFile().toUri());

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java
----------------------------------------------------------------------
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java
index 5ca041b..d1b5267 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java
@@ -53,7 +53,6 @@ import org.apache.parquet.filter2.predicate.FilterApi;
 import org.junit.Before;
 import org.junit.Test;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.example.data.Group;
 import org.apache.parquet.example.data.simple.SimpleGroupFactory;
 import org.apache.parquet.hadoop.ParquetInputFormat;
@@ -65,9 +64,11 @@ import org.apache.parquet.hadoop.api.ReadSupport;
 import org.apache.parquet.hadoop.metadata.CompressionCodecName;
 import org.apache.parquet.hadoop.util.ContextUtil;
 import org.apache.parquet.schema.MessageTypeParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestInputOutputFormat {
-  private static final Log LOG = Log.getLog(TestInputOutputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestInputOutputFormat.class);
   private static final Charset UTF_8 = Charset.forName("UTF-8");
   final Path parquetPath = new Path("target/test/example/TestInputOutputFormat/parquet");
   final Path inputPath = new Path("src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java");
@@ -348,10 +349,10 @@ public class TestInputOutputFormat {
 
   private void waitForJob(Job job) throws InterruptedException, IOException {
     while (!job.isComplete()) {
-      LOG.debug("waiting for job " + job.getJobName());
+      LOG.debug("waiting for job {}", job.getJobName());
       sleep(100);
     }
-    LOG.info("status for job " + job.getJobName() + ": " + (job.isSuccessful() ? "SUCCESS" : "FAILURE"));
+    LOG.info("status for job {}: {}", job.getJobName(), (job.isSuccessful() ? "SUCCESS" : "FAILURE"));
     if (!job.isSuccessful()) {
       throw new RuntimeException("job failed " + job.getJobName());
     }

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hadoop/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/parquet-hadoop/src/test/resources/log4j.properties b/parquet-hadoop/src/test/resources/log4j.properties
new file mode 100644
index 0000000..678fd66
--- /dev/null
+++ b/parquet-hadoop/src/test/resources/log4j.properties
@@ -0,0 +1,24 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+log4j.rootLogger=INFO, stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.threshold=INFO
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %-5p %30c{1}:%4L - %m%n

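This new test-only log4j.properties gives the migrated tests a concrete backend; SLF4J itself only routes, so the file takes effect assuming a log4j binding such as slf4j-log4j12 is on the test classpath. A sketch of what a test log line would look like under the pattern above (output shown is approximate):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class Log4jConfigDemo {
      public static void main(String[] args) {
        Logger log = LoggerFactory.getLogger(Log4jConfigDemo.class);
        // With slf4j-log4j12 bound, prints roughly:
        // 16:09:56,123 INFO                  Log4jConfigDemo:   8 - parquet test logging
        log.info("parquet test logging");
      }
    }
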
http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hive/parquet-hive-binding/parquet-hive-0.10-binding/src/main/java/org/apache/parquet/hive/internal/Hive010Binding.java
----------------------------------------------------------------------
diff --git a/parquet-hive/parquet-hive-binding/parquet-hive-0.10-binding/src/main/java/org/apache/parquet/hive/internal/Hive010Binding.java b/parquet-hive/parquet-hive-binding/parquet-hive-0.10-binding/src/main/java/org/apache/parquet/hive/internal/Hive010Binding.java
index 7d4ecc0..23ef5d4 100644
--- a/parquet-hive/parquet-hive-binding/parquet-hive-0.10-binding/src/main/java/org/apache/parquet/hive/internal/Hive010Binding.java
+++ b/parquet-hive/parquet-hive-binding/parquet-hive-0.10-binding/src/main/java/org/apache/parquet/hive/internal/Hive010Binding.java
@@ -37,8 +37,9 @@ import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapred.JobConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import org.apache.parquet.Log;
 
 /**
  * Hive 0.10 implementation of {@link org.apache.parquet.hive.HiveBinding HiveBinding}.
@@ -46,7 +47,7 @@ import org.apache.parquet.Log;
  * <a href="http://bit.ly/1a4tcrb">ManageJobConfig</a> class.
  */
 public class Hive010Binding extends AbstractHiveBinding {
-  private static final Log LOG = Log.getLog(Hive010Binding.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Hive010Binding.class);
   private final Map<String, PartitionDesc> pathToPartitionInfo =
       new LinkedHashMap<String, PartitionDesc>();
   /**

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hive/parquet-hive-binding/parquet-hive-0.12-binding/src/main/java/org/apache/parquet/hive/internal/Hive012Binding.java
----------------------------------------------------------------------
diff --git a/parquet-hive/parquet-hive-binding/parquet-hive-0.12-binding/src/main/java/org/apache/parquet/hive/internal/Hive012Binding.java b/parquet-hive/parquet-hive-binding/parquet-hive-0.12-binding/src/main/java/org/apache/parquet/hive/internal/Hive012Binding.java
index 37a2cd4..f65f7a5 100644
--- a/parquet-hive/parquet-hive-binding/parquet-hive-0.12-binding/src/main/java/org/apache/parquet/hive/internal/Hive012Binding.java
+++ b/parquet-hive/parquet-hive-binding/parquet-hive-0.12-binding/src/main/java/org/apache/parquet/hive/internal/Hive012Binding.java
@@ -38,8 +38,9 @@ import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapred.JobConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import org.apache.parquet.Log;
 
 /**
  * Hive 0.12 implementation of {@link org.apache.parquet.hive.HiveBinding HiveBinding}.
@@ -47,7 +48,7 @@ import org.apache.parquet.Log;
  * <a href="http://bit.ly/1a4tcrb">ManageJobConfig</a> class.
  */
 public class Hive012Binding extends AbstractHiveBinding {
-  private static final Log LOG = Log.getLog(Hive012Binding.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Hive012Binding.class);
   private final Map<String, PartitionDesc> pathToPartitionInfo =
       new LinkedHashMap<String, PartitionDesc>();
   /**

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hive/parquet-hive-binding/parquet-hive-binding-factory/src/main/java/org/apache/parquet/hive/HiveBindingFactory.java
----------------------------------------------------------------------
diff --git a/parquet-hive/parquet-hive-binding/parquet-hive-binding-factory/src/main/java/org/apache/parquet/hive/HiveBindingFactory.java b/parquet-hive/parquet-hive-binding/parquet-hive-binding-factory/src/main/java/org/apache/parquet/hive/HiveBindingFactory.java
index ba6003c..5ecc2d1 100644
--- a/parquet-hive/parquet-hive-binding/parquet-hive-binding-factory/src/main/java/org/apache/parquet/hive/HiveBindingFactory.java
+++ b/parquet-hive/parquet-hive-binding/parquet-hive-binding-factory/src/main/java/org/apache/parquet/hive/HiveBindingFactory.java
@@ -20,9 +20,10 @@ package org.apache.parquet.hive;
 
 import java.lang.reflect.Method;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.hive.internal.Hive010Binding;
 import org.apache.parquet.hive.internal.Hive012Binding;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Factory for creating HiveBinding objects based on the version of Hive
@@ -30,7 +31,7 @@ import org.apache.parquet.hive.internal.Hive012Binding;
  * to enable mocking.
  */
 public class HiveBindingFactory {
-  private static final Log LOG = Log.getLog(HiveBindingFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HiveBindingFactory.class);
   private static final String HIVE_VERSION_CLASS_NAME = "org.apache.hive.common.util.HiveVersionInfo";
   private static final String HIVE_VERSION_METHOD_NAME = "getVersion";
   private static final String HIVE_UTILITIES_CLASS_NAME = "org.apache.hadoop.hive.ql.exec.Utilities";
@@ -68,7 +69,7 @@ public class HiveBindingFactory {
     try {
       hiveVersionInfo = Class.forName(HIVE_VERSION_CLASS_NAME, true, classLoader);
     } catch (ClassNotFoundException e) {
-      LOG.debug("Class " + HIVE_VERSION_CLASS_NAME + ", not found, returning " + 
+      LOG.debug("Class {} not found, returning {}", HIVE_VERSION_CLASS_NAME,
           Hive010Binding.class.getSimpleName());
       return Hive010Binding.class;
     }
@@ -85,8 +86,7 @@ public class HiveBindingFactory {
       Method getVersionMethod = hiveVersionInfo.
           getMethod(HIVE_VERSION_METHOD_NAME, (Class[])null);
       String rawVersion = (String)getVersionMethod.invoke(null, (Object[])null);
-      LOG.debug("Raw Version from " + hiveVersionInfo.getSimpleName() + " is '" +
-          rawVersion + "'");
+      LOG.debug("Raw Version from {} is '{}'", hiveVersionInfo.getSimpleName(), rawVersion);
       hiveVersion = trimVersion(rawVersion);
     } catch (Exception e) {
       throw new UnexpectedHiveVersionProviderError("Unexpected error whilst " +
@@ -97,8 +97,7 @@ public class HiveBindingFactory {
       return createBindingForUnknownVersion();
     }
     if(hiveVersion.startsWith(HIVE_VERSION_010)) {
-      LOG.debug("Hive version " + hiveVersion + ", returning " +
-          Hive010Binding.class.getSimpleName());
+      LOG.debug("Hive version {}, returning {}", hiveVersion, Hive010Binding.class.getSimpleName());
       return Hive010Binding.class;
     } else if(hiveVersion.startsWith(HIVE_VERSION_011)) {
       LOG.debug("Hive version " + hiveVersion + ", returning " +
@@ -110,7 +109,7 @@ public class HiveBindingFactory {
           "and the parquet-hive jars from the parquet project should not be included " +
           "in Hive's classpath.");
     }
-    LOG.debug("Hive version " + hiveVersion + ", returning " +
+    LOG.debug("Hive version {}, returning {}", hiveVersion,
         Hive012Binding.class.getSimpleName());
     // as of 11/26/2013 it looks like the 0.12 binding will work for 0.13
     return Hive012Binding.class;

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
----------------------------------------------------------------------
diff --git a/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java b/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
index 9b87719..7d9ac8f 100644
--- a/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
+++ b/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
@@ -24,8 +24,6 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
@@ -46,6 +44,8 @@ import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.util.Progressable;
 
 import org.apache.parquet.hadoop.ParquetOutputFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  *
@@ -55,7 +55,7 @@ import org.apache.parquet.hadoop.ParquetOutputFormat;
 public class MapredParquetOutputFormat extends FileOutputFormat<Void, ArrayWritable> implements
   HiveOutputFormat<Void, ArrayWritable> {
 
-  private static final Log LOG = LogFactory.getLog(MapredParquetOutputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MapredParquetOutputFormat.class);
 
   protected ParquetOutputFormat<ArrayWritable> realOutputFormat;
 
@@ -96,7 +96,7 @@ public class MapredParquetOutputFormat extends FileOutputFormat<Void, ArrayWrita
       final Properties tableProperties,
       final Progressable progress) throws IOException {
 
-    LOG.info("creating new record writer..." + this);
+    LOG.info("creating new record writer...{}", this);
 
     final String columnNameProperty = tableProperties.getProperty(IOConstants.COLUMNS);
     final String columnTypeProperty = tableProperties.getProperty(IOConstants.COLUMNS_TYPES);

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
----------------------------------------------------------------------
diff --git a/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java b/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
index a225a95..053578d 100644
--- a/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
+++ b/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
@@ -22,8 +22,6 @@ import static org.apache.parquet.format.converter.ParquetMetadataConverter.SKIP_
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.io.ArrayWritable;
@@ -46,9 +44,11 @@ import org.apache.parquet.hadoop.util.ContextUtil;
 import org.apache.parquet.hive.HiveBinding;
 import org.apache.parquet.hive.HiveBindingFactory;
 import org.apache.parquet.schema.MessageTypeParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class ParquetRecordReaderWrapper  implements RecordReader<Void, ArrayWritable> {
-  public static final Log LOG = LogFactory.getLog(ParquetRecordReaderWrapper.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ParquetRecordReaderWrapper.class);
 
   private final long splitLen; // for getPos()
 

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
----------------------------------------------------------------------
diff --git a/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java b/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
index 117e9d4..8d4c5d7 100644
--- a/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
+++ b/parquet-hive/parquet-hive-storage-handler/src/main/java/org/apache/hadoop/hive/ql/io/parquet/write/ParquetRecordWriterWrapper.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hive.ql.io.parquet.write;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.Writable;
@@ -36,11 +34,13 @@ import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 
 import org.apache.parquet.hadoop.ParquetOutputFormat;
 import org.apache.parquet.hadoop.util.ContextUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class ParquetRecordWriterWrapper implements RecordWriter<Void, ArrayWritable>,
   FileSinkOperator.RecordWriter {
 
-  public static final Log LOG = LogFactory.getLog(ParquetRecordWriterWrapper.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ParquetRecordWriterWrapper.class);
 
   private final org.apache.hadoop.mapreduce.RecordWriter<Void, ArrayWritable> realWriter;
   private final TaskAttemptContext taskContext;
@@ -58,10 +58,10 @@ public class ParquetRecordWriterWrapper implements RecordWriter<Void, ArrayWrita
       }
       taskContext = ContextUtil.newTaskAttemptContext(jobConf, taskAttemptID);
 
-      LOG.info("creating real writer to write at " + name);
+      LOG.info("creating real writer to write at {}", name);
       realWriter = (org.apache.hadoop.mapreduce.RecordWriter<Void, ArrayWritable>)
           ((ParquetOutputFormat) realOutputFormat).getRecordWriter(taskContext, new Path(name));
-      LOG.info("real writer: " + realWriter);
+      LOG.info("real writer: {}", realWriter);
     } catch (final InterruptedException e) {
       throw new IOException(e);
     }

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-pig/src/main/java/org/apache/parquet/pig/ParquetLoader.java
----------------------------------------------------------------------
diff --git a/parquet-pig/src/main/java/org/apache/parquet/pig/ParquetLoader.java b/parquet-pig/src/main/java/org/apache/parquet/pig/ParquetLoader.java
index be54aa8..7f87691 100644
--- a/parquet-pig/src/main/java/org/apache/parquet/pig/ParquetLoader.java
+++ b/parquet-pig/src/main/java/org/apache/parquet/pig/ParquetLoader.java
@@ -20,7 +20,6 @@ package org.apache.parquet.pig;
 
 import static java.util.Arrays.asList;
 import static org.apache.hadoop.mapreduce.lib.input.FileInputFormat.setInputPaths;
-import static org.apache.parquet.Log.DEBUG;
 import static org.apache.parquet.hadoop.util.ContextUtil.getConfiguration;
 import static org.apache.parquet.pig.PigSchemaConverter.parsePigSchema;
 import static org.apache.parquet.pig.PigSchemaConverter.pigSchemaToString;
@@ -74,10 +73,11 @@ import static org.apache.pig.Expression.Column;
 import static org.apache.pig.Expression.Const;
 import static org.apache.pig.Expression.OpType;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.hadoop.ParquetInputFormat;
 import org.apache.parquet.hadoop.metadata.GlobalMetaData;
 import org.apache.parquet.io.ParquetDecodingException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  *
@@ -88,7 +88,7 @@ import org.apache.parquet.io.ParquetDecodingException;
  *
  */
 public class ParquetLoader extends LoadFunc implements LoadMetadata, LoadPushDown, LoadPredicatePushdown {
-  private static final Log LOG = Log.getLog(ParquetLoader.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ParquetLoader.class);
 
   public static final String ENABLE_PREDICATE_FILTER_PUSHDOWN = "parquet.pig.predicate.pushdown.enable";
   private static final boolean DEFAULT_PREDICATE_PUSHDOWN_ENABLED = false;
@@ -157,9 +157,9 @@ public class ParquetLoader extends LoadFunc implements LoadMetadata, LoadPushDow
 
   @Override
   public void setLocation(String location, Job job) throws IOException {
-    if (DEBUG) {
+    if (LOG.isDebugEnabled()) {
       String jobToString = String.format("job[id=%s, name=%s]", job.getJobID(), job.getJobName());
-      LOG.debug("LoadFunc.setLocation(" + location + ", " + jobToString + ")");
+      LOG.debug("LoadFunc.setLocation({}, {})", location, jobToString);
     }
 
     setInput(location, job);
@@ -201,7 +201,7 @@ public class ParquetLoader extends LoadFunc implements LoadMetadata, LoadPushDow
 
   @Override
   public InputFormat<Void, Tuple> getInputFormat() throws IOException {
-    if (DEBUG) LOG.debug("LoadFunc.getInputFormat()");
+    LOG.debug("LoadFunc.getInputFormat()");
     return getParquetInputFormat();
   }
 
@@ -248,7 +248,7 @@ public class ParquetLoader extends LoadFunc implements LoadMetadata, LoadPushDow
   @Override
   public void prepareToRead(@SuppressWarnings("rawtypes") RecordReader reader, PigSplit split)
       throws IOException {
-    if (DEBUG) LOG.debug("LoadFunc.prepareToRead(" + reader + ", " + split + ")");
+    LOG.debug("LoadFunc.prepareToRead({}, {})", reader, split);
     this.reader = reader;
   }
 
@@ -268,9 +268,9 @@ public class ParquetLoader extends LoadFunc implements LoadMetadata, LoadPushDow
 
   @Override
   public String[] getPartitionKeys(String location, Job job) throws IOException {
-    if (DEBUG) {
+    if (LOG.isDebugEnabled()) {
       String jobToString = String.format("job[id=%s, name=%s]", job.getJobID(), job.getJobName());
-      LOG.debug("LoadMetadata.getPartitionKeys(" + location + ", " + jobToString + ")");
+      LOG.debug("LoadMetadata.getPartitionKeys({}, {})", location, jobToString);
     }
     setInput(location, job);
     return null;
@@ -278,9 +278,9 @@ public class ParquetLoader extends LoadFunc implements LoadMetadata, LoadPushDow
 
   @Override
   public ResourceSchema getSchema(String location, Job job) throws IOException {
-    if (DEBUG) {
+    if (LOG.isDebugEnabled()) {
       String jobToString = String.format("job[id=%s, name=%s]", job.getJobID(), job.getJobName());
-      LOG.debug("LoadMetadata.getSchema(" + location + ", " + jobToString + ")");
+      LOG.debug("LoadMetadata.getSchema({}, {})", location, jobToString);
     }
     setInput(location, job);
     return new ResourceSchema(schema);
@@ -323,9 +323,9 @@ public class ParquetLoader extends LoadFunc implements LoadMetadata, LoadPushDow
   @Override
   public ResourceStatistics getStatistics(String location, Job job)
       throws IOException {
-    if (DEBUG) {
+    if (LOG.isDebugEnabled()) {
       String jobToString = String.format("job[id=%s, name=%s]", job.getJobID(), job.getJobName());
-      LOG.debug("LoadMetadata.getStatistics(" + location + ", " + jobToString + ")");
+      LOG.debug("LoadMetadata.getStatistics({}, {})", location, jobToString);
     }
     /* We need to call setInput since setLocation is not
        guaranteed to be called before this */
@@ -347,7 +347,7 @@ public class ParquetLoader extends LoadFunc implements LoadMetadata, LoadPushDow
 
   @Override
   public void setPartitionFilter(Expression expression) throws IOException {
-    if (DEBUG) LOG.debug("LoadMetadata.setPartitionFilter(" + expression + ")");
+    LOG.debug("LoadMetadata.setPartitionFilter({})", expression);
   }
 
   @Override
@@ -465,10 +465,10 @@ public class ParquetLoader extends LoadFunc implements LoadMetadata, LoadPushDow
 
   @Override
   public void setPushdownPredicate(Expression e) throws IOException {
-    LOG.info("Pig pushdown expression: " + e);
+    LOG.info("Pig pushdown expression: {}", e);
 
     FilterPredicate pred = buildFilter(e);
-    LOG.info("Parquet filter predicate expression: " + pred);
+    LOG.info("Parquet filter predicate expression: {}", pred);
 
     storeInUDFContext(ParquetInputFormat.FILTER_PREDICATE, pred);
   }

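Note which guards survive in ParquetLoader: wherever the debug message needs a String.format-built argument, the isDebugEnabled() check is kept, because String.format runs eagerly before debug() is even called; calls with plain arguments lose their guards. A sketch of the distinction (names are illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class EagerArgDemo {
      private static final Logger LOG = LoggerFactory.getLogger(EagerArgDemo.class);

      static void trace(String location, String jobId, String jobName) {
        // String.format is evaluated before debug() is invoked, so the guard
        // still pays for itself here.
        if (LOG.isDebugEnabled()) {
          String jobToString = String.format("job[id=%s, name=%s]", jobId, jobName);
          LOG.debug("setLocation({}, {})", location, jobToString);
        }
        // A plain argument needs no guard; formatting is already deferred.
        LOG.debug("setLocation({})", location);
      }
    }
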
http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-pig/src/main/java/org/apache/parquet/pig/PigSchemaConverter.java
----------------------------------------------------------------------
diff --git a/parquet-pig/src/main/java/org/apache/parquet/pig/PigSchemaConverter.java b/parquet-pig/src/main/java/org/apache/parquet/pig/PigSchemaConverter.java
index e3e4b53..c9eb0ba 100644
--- a/parquet-pig/src/main/java/org/apache/parquet/pig/PigSchemaConverter.java
+++ b/parquet-pig/src/main/java/org/apache/parquet/pig/PigSchemaConverter.java
@@ -18,7 +18,6 @@
  */
 package org.apache.parquet.pig;
 
-import static org.apache.parquet.Log.DEBUG;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -36,7 +35,6 @@ import org.apache.pig.impl.util.Pair;
 import org.apache.pig.impl.util.Utils;
 import org.apache.pig.parser.ParserException;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.schema.ConversionPatterns;
 import org.apache.parquet.schema.GroupType;
 import org.apache.parquet.schema.MessageType;
@@ -46,6 +44,8 @@ import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
 import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeNameConverter;
 import org.apache.parquet.schema.Type;
 import org.apache.parquet.schema.Type.Repetition;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 /**
@@ -60,7 +60,7 @@ import org.apache.parquet.schema.Type.Repetition;
  *
  */
 public class PigSchemaConverter {
-  private static final Log LOG = Log.getLog(PigSchemaConverter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PigSchemaConverter.class);
   static final String ARRAY_VALUE_NAME = "value";
   private ColumnAccess columnAccess;
 
@@ -456,9 +456,9 @@ public class PigSchemaConverter {
    */
   public MessageType filter(MessageType schemaToFilter, Schema requestedPigSchema, RequiredFieldList requiredFieldList) {
     try {
-      if (DEBUG) LOG.debug("filtering schema:\n" + schemaToFilter + "\nwith requested pig schema:\n " + requestedPigSchema);
+      if (LOG.isDebugEnabled()) LOG.debug("filtering schema:\n" + schemaToFilter + "\nwith requested pig schema:\n " + requestedPigSchema);
       List<Type> result = columnAccess.filterTupleSchema(schemaToFilter, requestedPigSchema, requiredFieldList);
-      if (DEBUG) LOG.debug("schema:\n" + schemaToFilter + "\nfiltered to:\n" + result);
+      if (LOG.isDebugEnabled()) LOG.debug("schema:\n" + schemaToFilter + "\nfiltered to:\n" + result);
       return new MessageType(schemaToFilter.getName(), result);
     } catch (RuntimeException e) {
       throw new RuntimeException("can't filter " + schemaToFilter + " with " + requestedPigSchema, e);
@@ -466,7 +466,7 @@ public class PigSchemaConverter {
   }
 
   private Type filter(Type type, FieldSchema fieldSchema) {
-    if (DEBUG) LOG.debug("filtering type:\n" + type + "\nwith:\n " + fieldSchema);
+    if (LOG.isDebugEnabled()) LOG.debug("filtering type:\n" + type + "\nwith:\n " + fieldSchema);
     try {
       switch (fieldSchema.type) {
       case DataType.BAG:
@@ -486,12 +486,12 @@ public class PigSchemaConverter {
   }
 
   private Type filterTuple(GroupType tupleType, FieldSchema tupleFieldSchema) throws FrontendException {
-    if (DEBUG) LOG.debug("filtering TUPLE schema:\n" + tupleType + "\nwith:\n " + tupleFieldSchema);
+    if (LOG.isDebugEnabled()) LOG.debug("filtering TUPLE schema:\n" + tupleType + "\nwith:\n " + tupleFieldSchema);
     return tupleType.withNewFields(columnAccess.filterTupleSchema(tupleType, tupleFieldSchema.schema, null));
   }
 
   private Type filterMap(GroupType mapType, FieldSchema mapFieldSchema) throws FrontendException {
-    if (DEBUG) LOG.debug("filtering MAP schema:\n" + mapType + "\nwith:\n " + mapFieldSchema);
+    if (LOG.isDebugEnabled()) LOG.debug("filtering MAP schema:\n" + mapType + "\nwith:\n " + mapFieldSchema);
     if (mapType.getFieldCount() != 1) {
       throw new RuntimeException("not unwrapping the right type, this should be a Map: " + mapType);
     }
@@ -504,7 +504,7 @@ public class PigSchemaConverter {
   }
 
   private Type filterBag(GroupType bagType, FieldSchema bagFieldSchema) throws FrontendException {
-    if (DEBUG) LOG.debug("filtering BAG schema:\n" + bagType + "\nwith:\n " + bagFieldSchema);
+    if (LOG.isDebugEnabled()) LOG.debug("filtering BAG schema:\n" + bagType + "\nwith:\n " + bagFieldSchema);
     if (bagType.getFieldCount() != 1) {
       throw new RuntimeException("not unwrapping the right type, this should be a Bag: " + bagType);
     }

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-pig/src/main/java/org/apache/parquet/pig/TupleReadSupport.java
----------------------------------------------------------------------
diff --git a/parquet-pig/src/main/java/org/apache/parquet/pig/TupleReadSupport.java b/parquet-pig/src/main/java/org/apache/parquet/pig/TupleReadSupport.java
index ee7c710..75bb5b5 100644
--- a/parquet-pig/src/main/java/org/apache/parquet/pig/TupleReadSupport.java
+++ b/parquet-pig/src/main/java/org/apache/parquet/pig/TupleReadSupport.java
@@ -33,7 +33,6 @@ import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
 import org.apache.pig.impl.util.ObjectSerializer;
-import org.apache.parquet.Log;
 import org.apache.parquet.hadoop.api.InitContext;
 import org.apache.parquet.hadoop.api.ReadSupport;
 import org.apache.parquet.io.ParquetDecodingException;
@@ -41,6 +40,8 @@ import org.apache.parquet.io.api.RecordMaterializer;
 import org.apache.parquet.pig.convert.TupleRecordMaterializer;
 import org.apache.parquet.schema.IncompatibleSchemaModificationException;
 import org.apache.parquet.schema.MessageType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Read support for Pig Tuple
@@ -54,7 +55,7 @@ public class TupleReadSupport extends ReadSupport<Tuple> {
   static final String PARQUET_COLUMN_INDEX_ACCESS = "parquet.private.pig.column.index.access";
   static final String PARQUET_PIG_REQUIRED_FIELDS = "parquet.private.pig.required.fields";
   static final String PARQUET_PIG_ELEPHANT_BIRD_COMPATIBLE = "parquet.pig.elephantbird.compatible";
-  private static final Log LOG = Log.getLog(TupleReadSupport.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TupleReadSupport.class);
 
   private static final PigSchemaConverter pigSchemaConverter = new PigSchemaConverter(false);
 

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-pig/src/test/java/org/apache/parquet/pig/PerfTest2.java
----------------------------------------------------------------------
diff --git a/parquet-pig/src/test/java/org/apache/parquet/pig/PerfTest2.java b/parquet-pig/src/test/java/org/apache/parquet/pig/PerfTest2.java
index 388d527..0b8a464 100644
--- a/parquet-pig/src/test/java/org/apache/parquet/pig/PerfTest2.java
+++ b/parquet-pig/src/test/java/org/apache/parquet/pig/PerfTest2.java
@@ -48,8 +48,9 @@ import org.apache.pig.data.TupleFactory;
 import org.apache.pig.impl.util.Utils;
 import org.apache.pig.parser.ParserException;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.hadoop.util.ContextUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  *
@@ -60,6 +61,9 @@ import org.apache.parquet.hadoop.util.ContextUtil;
  */
 public class PerfTest2 {
 
+  private static final Logger LOG = LoggerFactory.getLogger(PerfTest2.class);
+  private static final boolean DEBUG = LOG.isDebugEnabled();
+
   static final int COLUMN_COUNT = 50;
   private static final long ROW_COUNT = 100000;
   private static Configuration conf = new Configuration();
@@ -173,7 +177,7 @@ public class PerfTest2 {
       recordReader.initialize(split, taskAttemptContext);
       Tuple t;
       while ((t = loadFunc.getNext()) != null) {
-        if (Log.DEBUG) System.out.println(t);
+        if (DEBUG) System.out.println(t);
         ++i;
       }
     }

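PerfTest2 caches isDebugEnabled() in a static final DEBUG field, mirroring the old Log.DEBUG constant it replaces. That keeps the hot loop cheap, but it snapshots the level once at class initialization, so a log level changed at runtime is never picked up; an acceptable trade-off for a one-shot benchmark, though not a pattern for long-lived services. A sketch of the trade-off (CachedLevelDemo is illustrative only):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class CachedLevelDemo {
      private static final Logger LOG = LoggerFactory.getLogger(CachedLevelDemo.class);
      // Evaluated once when the class loads: free inside tight loops,
      // but blind to later runtime level changes.
      private static final boolean DEBUG = LOG.isDebugEnabled();

      static void hotLoop(Object t) {
        if (DEBUG) System.out.println(t);
      }
    }
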
http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-pig/src/test/java/org/apache/parquet/pig/TestTupleRecordConsumer.java
----------------------------------------------------------------------
diff --git a/parquet-pig/src/test/java/org/apache/parquet/pig/TestTupleRecordConsumer.java b/parquet-pig/src/test/java/org/apache/parquet/pig/TestTupleRecordConsumer.java
index 83e1227..ef048f2 100644
--- a/parquet-pig/src/test/java/org/apache/parquet/pig/TestTupleRecordConsumer.java
+++ b/parquet-pig/src/test/java/org/apache/parquet/pig/TestTupleRecordConsumer.java
@@ -40,7 +40,6 @@ import org.apache.pig.impl.util.Utils;
 import org.apache.pig.parser.ParserException;
 import org.junit.Test;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.example.data.Group;
 import org.apache.parquet.example.data.GroupWriter;
 import org.apache.parquet.example.data.simple.SimpleGroup;
@@ -51,9 +50,11 @@ import org.apache.parquet.io.ConverterConsumer;
 import org.apache.parquet.io.RecordConsumerLoggingWrapper;
 import org.apache.parquet.io.api.RecordMaterializer;
 import org.apache.parquet.schema.MessageType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestTupleRecordConsumer {
-  private static final Log logger = Log.getLog(TestTupleRecordConsumer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestTupleRecordConsumer.class);
 
   @Test
   public void testArtSchema() throws ExecException, ParserException {
@@ -127,7 +128,7 @@ public class TestTupleRecordConsumer {
     RecordMaterializer<Tuple> recordConsumer = newPigRecordConsumer(pigSchemaString);
     TupleWriteSupport tupleWriter = newTupleWriter(pigSchemaString, recordConsumer);
     for (Tuple tuple : input) {
-      logger.debug(tuple);
+      LOG.debug("{}", tuple);
       tupleWriter.write(tuple);
       tuples.add(recordConsumer.getCurrentRecord());
     }
@@ -151,14 +152,14 @@ public class TestTupleRecordConsumer {
       groupWriter.write(group);
       final Tuple tuple = pigRecordConsumer.getCurrentRecord();
       tuples.add(tuple);
-      logger.debug("in: "+group+"\nout:"+tuple);
+      LOG.debug("in: {}\nout:{}", group, tuple);
     }
 
     List<Group> groups = new ArrayList<Group>();
     GroupRecordConverter recordConsumer = new GroupRecordConverter(schema);
     TupleWriteSupport tupleWriter = newTupleWriter(pigSchemaString, recordConsumer);
     for (Tuple t : tuples) {
-      logger.debug(t);
+      LOG.debug("{}", t);
       tupleWriter.write(t);
       groups.add(recordConsumer.getCurrentRecord());
     }
@@ -166,7 +167,7 @@ public class TestTupleRecordConsumer {
     assertEquals(input.size(), groups.size());
     for (int i = 0; i < input.size(); i++) {
       Group in = input.get(i);
-      logger.debug(in);
+      LOG.debug("{}", in);
       Group out = groups.get(i);
       assertEquals(in.toString(), out.toString());
     }

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-pig/src/test/java/org/apache/parquet/pig/TupleConsumerPerfTest.java
----------------------------------------------------------------------
diff --git a/parquet-pig/src/test/java/org/apache/parquet/pig/TupleConsumerPerfTest.java b/parquet-pig/src/test/java/org/apache/parquet/pig/TupleConsumerPerfTest.java
index ff192e2..2148e06 100644
--- a/parquet-pig/src/test/java/org/apache/parquet/pig/TupleConsumerPerfTest.java
+++ b/parquet-pig/src/test/java/org/apache/parquet/pig/TupleConsumerPerfTest.java
@@ -31,7 +31,6 @@ import org.apache.pig.data.TupleFactory;
 import org.apache.pig.impl.util.Utils;
 import org.apache.pig.parser.ParserException;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.column.impl.ColumnWriteStoreV1;
 import org.apache.parquet.column.page.PageReadStore;
 import org.apache.parquet.column.page.mem.MemPageStore;

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoReadSupport.java
----------------------------------------------------------------------
diff --git a/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoReadSupport.java b/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoReadSupport.java
index e6921db..3a21d84 100644
--- a/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoReadSupport.java
+++ b/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoReadSupport.java
@@ -21,11 +21,12 @@ package org.apache.parquet.proto;
 import com.google.protobuf.Message;
 import com.twitter.elephantbird.util.Protobufs;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.parquet.Log;
 import org.apache.parquet.hadoop.api.InitContext;
 import org.apache.parquet.hadoop.api.ReadSupport;
 import org.apache.parquet.io.api.RecordMaterializer;
 import org.apache.parquet.schema.MessageType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Map;
 
@@ -35,7 +36,7 @@ import java.util.Map;
  */
 public class ProtoReadSupport<T extends Message> extends ReadSupport<T> {
 
-  private static final Log LOG = Log.getLog(ProtoReadSupport.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ProtoReadSupport.class);
 
   public static final String PB_REQUESTED_PROJECTION = "parquet.proto.projection";
 
@@ -62,11 +63,11 @@ public class ProtoReadSupport<T extends Message> extends ReadSupport<T> {
 
     if (requestedProjectionString != null && !requestedProjectionString.trim().isEmpty()) {
       MessageType requestedProjection = getSchemaForRead(context.getFileSchema(), requestedProjectionString);
-      LOG.debug("Reading data with projection " + requestedProjection);
+      LOG.debug("Reading data with projection {}", requestedProjection);
       return new ReadContext(requestedProjection);
     } else {
       MessageType fileSchema = context.getFileSchema();
-      LOG.debug("Reading data with schema " + fileSchema);
+      LOG.debug("Reading data with schema {}", fileSchema);
       return new ReadContext(fileSchema);
     }
   }
@@ -85,7 +86,7 @@ public class ProtoReadSupport<T extends Message> extends ReadSupport<T> {
       throw new RuntimeException("I Need parameter " + PB_CLASS + " with Protocol Buffer class");
     }
 
-    LOG.debug("Reading data with Protocol Buffer class " + headerProtoClass);
+    LOG.debug("Reading data with Protocol Buffer class {}", headerProtoClass);
 
     MessageType requestedSchema = readContext.getRequestedSchema();
     Class<? extends Message> protobufClass = Protobufs.getProtobufClass(headerProtoClass);
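
The replacements in this file show the core SLF4J idiom of the commit: a {} placeholder defers message construction to the binding, so the string (and each argument's toString()) is built only when DEBUG is actually enabled, and no explicit guard is needed for cheap arguments. A hedged before/after sketch (class and variable names are illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class PlaceholderSketch {
      private static final Logger LOG = LoggerFactory.getLogger(PlaceholderSketch.class);

      void describe(Object requestedProjection) {
        // Eager: concatenation and toString() run even when DEBUG is off.
        LOG.debug("Reading data with projection " + requestedProjection);
        // Lazy: formatting happens inside the binding, only when DEBUG is on.
        LOG.debug("Reading data with projection {}", requestedProjection);
      }
    }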

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoSchemaConverter.java
----------------------------------------------------------------------
diff --git a/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoSchemaConverter.java b/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoSchemaConverter.java
index 3f6ed6b..2c4a1ca 100644
--- a/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoSchemaConverter.java
+++ b/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoSchemaConverter.java
@@ -29,7 +29,6 @@ import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64;
 
 import java.util.List;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.schema.Type;
 import org.apache.parquet.schema.Types;
@@ -40,6 +39,8 @@ import com.google.protobuf.Descriptors;
 import com.google.protobuf.Descriptors.FieldDescriptor.JavaType;
 import com.google.protobuf.Message;
 import com.twitter.elephantbird.util.Protobufs;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * <p/>
@@ -49,7 +50,7 @@ import com.twitter.elephantbird.util.Protobufs;
  */
 public class ProtoSchemaConverter {
 
-  private static final Log LOG = Log.getLog(ProtoSchemaConverter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ProtoSchemaConverter.class);
 
   public MessageType convert(Class<? extends Message> protobufClass) {
     LOG.debug("Converting protocol buffer class \"" + protobufClass + "\" to parquet schema.");

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoWriteSupport.java
----------------------------------------------------------------------
diff --git a/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoWriteSupport.java b/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoWriteSupport.java
index cef2b93..c0ed351 100644
--- a/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoWriteSupport.java
+++ b/parquet-protobuf/src/main/java/org/apache/parquet/proto/ProtoWriteSupport.java
@@ -26,7 +26,6 @@ import com.google.protobuf.MessageOrBuilder;
 import com.google.protobuf.TextFormat;
 import com.twitter.elephantbird.util.Protobufs;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.parquet.Log;
 import org.apache.parquet.hadoop.BadConfigurationException;
 import org.apache.parquet.hadoop.api.WriteSupport;
 import org.apache.parquet.io.InvalidRecordException;
@@ -36,6 +35,8 @@ import org.apache.parquet.schema.GroupType;
 import org.apache.parquet.schema.IncompatibleSchemaModificationException;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.schema.Type;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.lang.reflect.Array;
 import java.util.HashMap;
@@ -48,7 +49,7 @@ import java.util.Map;
  */
 public class ProtoWriteSupport<T extends MessageOrBuilder> extends WriteSupport<T> {
 
-  private static final Log LOG = Log.getLog(ProtoWriteSupport.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ProtoWriteSupport.class);
   public static final String PB_CLASS_WRITE = "parquet.proto.writeClass";
 
   private RecordConsumer recordConsumer;

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-protobuf/src/test/java/org/apache/parquet/proto/utils/WriteUsingMR.java
----------------------------------------------------------------------
diff --git a/parquet-protobuf/src/test/java/org/apache/parquet/proto/utils/WriteUsingMR.java b/parquet-protobuf/src/test/java/org/apache/parquet/proto/utils/WriteUsingMR.java
index c87cb44..d18076a 100644
--- a/parquet-protobuf/src/test/java/org/apache/parquet/proto/utils/WriteUsingMR.java
+++ b/parquet-protobuf/src/test/java/org/apache/parquet/proto/utils/WriteUsingMR.java
@@ -27,9 +27,10 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
-import org.apache.parquet.Log;
 import org.apache.parquet.proto.ProtoParquetOutputFormat;
 import org.apache.parquet.proto.TestUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -44,7 +45,7 @@ import static java.lang.Thread.sleep;
  */
 public class WriteUsingMR {
 
-  private static final Log LOG = Log.getLog(WriteUsingMR.class);
+  private static final Logger LOG = LoggerFactory.getLogger(WriteUsingMR.class);
   Configuration conf = new Configuration();
   private static List<Message> inputMessages;
   Path outputPath;
@@ -61,7 +62,7 @@ public class WriteUsingMR {
       } else {
         for (Message msg : inputMessages) {
           context.write(null, msg);
-          LOG.debug("Reading msg from mock writing mapper" + msg);
+          LOG.debug("Reading msg from mock writing mapper {}", msg);
         }
       }
     }
@@ -102,7 +103,7 @@ public class WriteUsingMR {
   static void waitForJob(Job job) throws Exception {
     job.submit();
     while (!job.isComplete()) {
-      LOG.debug("waiting for job " + job.getJobName());
+      LOG.debug("waiting for job {}", job.getJobName());
       sleep(50);
     }
     LOG.debug("status for job " + job.getJobName() + ": " + (job.isSuccessful() ? "SUCCESS" : "FAILURE"));

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-thrift/src/main/java/org/apache/parquet/hadoop/thrift/AbstractThriftWriteSupport.java
----------------------------------------------------------------------
diff --git a/parquet-thrift/src/main/java/org/apache/parquet/hadoop/thrift/AbstractThriftWriteSupport.java b/parquet-thrift/src/main/java/org/apache/parquet/hadoop/thrift/AbstractThriftWriteSupport.java
index 5f210d3..fe8019c 100644
--- a/parquet-thrift/src/main/java/org/apache/parquet/hadoop/thrift/AbstractThriftWriteSupport.java
+++ b/parquet-thrift/src/main/java/org/apache/parquet/hadoop/thrift/AbstractThriftWriteSupport.java
@@ -22,7 +22,6 @@ import org.apache.thrift.TBase;
 
 import com.twitter.elephantbird.pig.util.ThriftToPig;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.hadoop.BadConfigurationException;
 import org.apache.parquet.hadoop.api.WriteSupport;
 import org.apache.parquet.io.ColumnIOFactory;
@@ -34,11 +33,13 @@ import org.apache.parquet.thrift.ParquetWriteProtocol;
 import org.apache.parquet.thrift.ThriftMetaData;
 import org.apache.parquet.thrift.ThriftSchemaConverter;
 import org.apache.parquet.thrift.struct.ThriftType.StructType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 public abstract class AbstractThriftWriteSupport<T> extends WriteSupport<T> {
   public static final String PARQUET_THRIFT_CLASS = "parquet.thrift.class";
-  private static final Log LOG = Log.getLog(AbstractThriftWriteSupport.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AbstractThriftWriteSupport.class);
 
   public static void setGenericThriftClass(Configuration configuration, Class<?> thriftClass) {
     configuration.set(PARQUET_THRIFT_CLASS, thriftClass.getName());

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-thrift/src/main/java/org/apache/parquet/hadoop/thrift/ThriftReadSupport.java
----------------------------------------------------------------------
diff --git a/parquet-thrift/src/main/java/org/apache/parquet/hadoop/thrift/ThriftReadSupport.java b/parquet-thrift/src/main/java/org/apache/parquet/hadoop/thrift/ThriftReadSupport.java
index 1c020ae..f49fb67 100644
--- a/parquet-thrift/src/main/java/org/apache/parquet/hadoop/thrift/ThriftReadSupport.java
+++ b/parquet-thrift/src/main/java/org/apache/parquet/hadoop/thrift/ThriftReadSupport.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.thrift.TBase;
 import org.apache.thrift.protocol.TProtocol;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.Strings;
 import org.apache.parquet.hadoop.api.InitContext;
 import org.apache.parquet.hadoop.api.ReadSupport;
@@ -44,9 +43,11 @@ import org.apache.parquet.thrift.projection.StrictFieldProjectionFilter;
 import org.apache.parquet.thrift.projection.ThriftProjectionException;
 import org.apache.parquet.thrift.projection.deprecated.DeprecatedFieldProjectionFilter;
 import org.apache.parquet.thrift.struct.ThriftType.StructType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class ThriftReadSupport<T> extends ReadSupport<T> {
-  private static final Log LOG = Log.getLog(ThriftReadSupport.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ThriftReadSupport.class);
 
   /**
    * Deprecated. Use {@link #STRICT_THRIFT_COLUMN_FILTER_KEY}
@@ -128,8 +129,8 @@ public class ThriftReadSupport<T> extends ReadSupport<T> {
     }
 
     if (!Strings.isNullOrEmpty(deprecated)) {
-      LOG.warn(String.format("Using %s is deprecated. Please see the docs for %s!",
-          THRIFT_COLUMN_FILTER_KEY, STRICT_THRIFT_COLUMN_FILTER_KEY));
+      LOG.warn("Using {} is deprecated. Please see the docs for {}!",
+          THRIFT_COLUMN_FILTER_KEY, STRICT_THRIFT_COLUMN_FILTER_KEY);
       return new DeprecatedFieldProjectionFilter(deprecated);
     }
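
Here the commit replaces String.format with SLF4J's own placeholders. For a WARN-level message the performance difference is negligible, since warnings are rarely disabled; the gain is consistency and one less formatting layer. A minimal sketch of the two styles (class and parameter names are illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class WarnFormatSketch {
      private static final Logger LOG = LoggerFactory.getLogger(WarnFormatSketch.class);

      void warnDeprecated(String oldKey, String newKey) {
        // Pre-formatting works, but builds the string unconditionally:
        LOG.warn(String.format("Using %s is deprecated. Please see the docs for %s!", oldKey, newKey));
        // The placeholder form lets the binding expand {} only if WARN is enabled:
        LOG.warn("Using {} is deprecated. Please see the docs for {}!", oldKey, newKey);
      }
    }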
 

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-thrift/src/main/java/org/apache/parquet/thrift/ParquetReadProtocol.java
----------------------------------------------------------------------
diff --git a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ParquetReadProtocol.java b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ParquetReadProtocol.java
index d3b496a..b72c85c 100644
--- a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ParquetReadProtocol.java
+++ b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ParquetReadProtocol.java
@@ -18,7 +18,6 @@
  */
 package org.apache.parquet.thrift;
 
-import static org.apache.parquet.Log.DEBUG;
 
 import java.nio.ByteBuffer;
 import java.util.Collection;
@@ -33,12 +32,12 @@ import org.apache.thrift.protocol.TMessage;
 import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.protocol.TSet;
 import org.apache.thrift.protocol.TStruct;
-
-import org.apache.parquet.Log;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 class ParquetReadProtocol extends ParquetProtocol {
-  private static final Log LOG = Log.getLog(ParquetReadProtocol.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ParquetReadProtocol.class);
 
   ParquetReadProtocol() {
     super("read");
@@ -63,102 +62,102 @@ class ParquetReadProtocol extends ParquetProtocol {
   }
 
   public TMessage readMessageBegin() throws TException {
-    if (DEBUG) LOG.debug("readMessageBegin()");
+    LOG.debug("readMessageBegin()");
     return next().readMessageBegin();
   }
 
   public void readMessageEnd() throws TException {
-    if (DEBUG) LOG.debug("readMessageEnd()");
+    LOG.debug("readMessageEnd()");
     next().readMessageEnd();
   }
 
   public TStruct readStructBegin() throws TException {
-    if (DEBUG) LOG.debug("readStructBegin()");
+    LOG.debug("readStructBegin()");
     return next().readStructBegin();
   }
 
   public void readStructEnd() throws TException {
-    if (DEBUG) LOG.debug("readStructEnd()");
+    LOG.debug("readStructEnd()");
     next().readStructEnd();
   }
 
   public TField readFieldBegin() throws TException {
-    if (DEBUG) LOG.debug("readFieldBegin()");
+    LOG.debug("readFieldBegin()");
     return next().readFieldBegin();
   }
 
   public void readFieldEnd() throws TException {
-    if (DEBUG) LOG.debug("readFieldEnd()");
+    LOG.debug("readFieldEnd()");
     next().readFieldEnd();
   }
 
   public TMap readMapBegin() throws TException {
-    if (DEBUG) LOG.debug("readMapBegin()");
+    LOG.debug("readMapBegin()");
     return next().readMapBegin();
   }
 
   public void readMapEnd() throws TException {
-    if (DEBUG) LOG.debug("readMapEnd()");
+    LOG.debug("readMapEnd()");
     next().readMapEnd();
   }
 
   public TList readListBegin() throws TException {
-    if (DEBUG) LOG.debug("readListBegin()");
+    LOG.debug("readListBegin()");
     return next().readListBegin();
   }
 
   public void readListEnd() throws TException {
-    if (DEBUG) LOG.debug("readListEnd()");
+    LOG.debug("readListEnd()");
     next().readListEnd();
   }
 
   public TSet readSetBegin() throws TException {
-    if (DEBUG) LOG.debug("readSetBegin()");
+    LOG.debug("readSetBegin()");
     return next().readSetBegin();
   }
 
   public void readSetEnd() throws TException {
-    if (DEBUG) LOG.debug("readSetEnd()");
+    LOG.debug("readSetEnd()");
     next().readSetEnd();
   }
 
   public boolean readBool() throws TException {
-    if (DEBUG) LOG.debug("readBool()");
+    LOG.debug("readBool()");
     return next().readBool();
   }
 
   public byte readByte() throws TException {
-    if (DEBUG) LOG.debug("readByte()");
+    LOG.debug("readByte()");
     return next().readByte();
   }
 
   public short readI16() throws TException {
-    if (DEBUG) LOG.debug("readI16()");
+    LOG.debug("readI16()");
     return next().readI16();
   }
 
   public int readI32() throws TException {
-    if (DEBUG) LOG.debug("readI32()");
+    LOG.debug("readI32()");
     return next().readI32();
   }
 
   public long readI64() throws TException {
-    if (DEBUG) LOG.debug("readI64()");
+    LOG.debug("readI64()");
     return next().readI64();
   }
 
   public double readDouble() throws TException {
-    if (DEBUG) LOG.debug("readDouble()");
+    LOG.debug("readDouble()");
     return next().readDouble();
   }
 
   public String readString() throws TException {
-    if (DEBUG) LOG.debug("readString()");
+    LOG.debug("readString()");
     return next().readString();
   }
 
   public ByteBuffer readBinary() throws TException {
-    if (DEBUG) LOG.debug("readBinary()");
+    LOG.debug("readBinary()");
     return next().readBinary();
   }
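
Every method in this hunk drops its if (DEBUG) guard because the message is a constant string: there is no concatenation to avoid, and the SLF4J binding performs the level check internally, so the bare call costs about the same as the guard did. A minimal sketch of the resulting shape (the return value stands in for the delegated next().readBool() call):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.apache.thrift.TException;

    class ConstantMessageSketch {
      private static final Logger LOG = LoggerFactory.getLogger(ConstantMessageSketch.class);

      public boolean readBool() throws TException {
        LOG.debug("readBool()"); // constant message: the binding's internal level check suffices
        return true;             // illustrative stand-in for next().readBool()
      }
    }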
 

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-thrift/src/main/java/org/apache/parquet/thrift/ParquetWriteProtocol.java
----------------------------------------------------------------------
diff --git a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ParquetWriteProtocol.java b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ParquetWriteProtocol.java
index 40984cc..8755ee4 100644
--- a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ParquetWriteProtocol.java
+++ b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ParquetWriteProtocol.java
@@ -18,7 +18,6 @@
  */
 package org.apache.parquet.thrift;
 
-import static org.apache.parquet.Log.DEBUG;
 
 import java.nio.ByteBuffer;
 
@@ -32,7 +31,6 @@ import org.apache.thrift.protocol.TSet;
 import org.apache.thrift.protocol.TStruct;
 import org.apache.thrift.protocol.TType;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.io.ColumnIO;
 import org.apache.parquet.io.GroupColumnIO;
 import org.apache.parquet.io.MessageColumnIO;
@@ -48,6 +46,8 @@ import org.apache.parquet.thrift.struct.ThriftType.ListType;
 import org.apache.parquet.thrift.struct.ThriftType.MapType;
 import org.apache.parquet.thrift.struct.ThriftType.SetType;
 import org.apache.parquet.thrift.struct.ThriftType.StructType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class ParquetWriteProtocol extends ParquetProtocol {
 
@@ -409,7 +409,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
 
   }
 
-  private static final Log LOG = Log.getLog(ParquetWriteProtocol.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ParquetWriteProtocol.class);
 
 
   private final RecordConsumer recordConsumer;
@@ -438,7 +438,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeMessageBegin(TMessage message) throws TException {
-    if (DEBUG) LOG.debug("writeMessageBegin("+message+")");
+    LOG.debug("writeMessageBegin({})", message);
     currentProtocol.writeMessageBegin(message);
   }
 
@@ -448,7 +448,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeMessageEnd() throws TException {
-    if (DEBUG) LOG.debug("writeMessageEnd()");
+    LOG.debug("writeMessageEnd()");
     currentProtocol.writeMessageEnd();
   }
 
@@ -458,7 +458,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeStructBegin(TStruct struct) throws TException {
-    if (DEBUG) LOG.debug("writeStructBegin("+toString(struct)+")");
+    if (LOG.isDebugEnabled()) LOG.debug("writeStructBegin("+toString(struct)+")");
     currentProtocol.writeStructBegin(struct);
   }
 
@@ -468,7 +468,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeStructEnd() throws TException {
-    if (DEBUG) LOG.debug("writeStructEnd()");
+    LOG.debug("writeStructEnd()");
     currentProtocol.writeStructEnd();
   }
 
@@ -478,7 +478,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeFieldBegin(TField field) throws TException {
-    if (DEBUG) LOG.debug("writeFieldBegin("+field+")");
+    LOG.debug("writeFieldBegin({})", field);
     currentProtocol.writeFieldBegin(field);
   }
 
@@ -488,7 +488,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeFieldEnd() throws TException {
-    if (DEBUG) LOG.debug("writeFieldEnd()");
+    LOG.debug("writeFieldEnd()");
     currentProtocol.writeFieldEnd();
   }
 
@@ -498,7 +498,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeFieldStop() throws TException {
-    if (DEBUG) LOG.debug("writeFieldStop()");
+    LOG.debug("writeFieldStop()");
     currentProtocol.writeFieldStop();
   }
 
@@ -508,7 +508,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeMapBegin(TMap map) throws TException {
-    if (DEBUG) LOG.debug("writeMapBegin("+toString(map)+")");
+    if (LOG.isDebugEnabled()) LOG.debug("writeMapBegin("+toString(map)+")");
     currentProtocol.writeMapBegin(map);
   }
 
@@ -518,7 +518,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeMapEnd() throws TException {
-    if (DEBUG) LOG.debug("writeMapEnd()");
+    LOG.debug("writeMapEnd()");
     currentProtocol.writeMapEnd();
   }
 
@@ -528,7 +528,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeListBegin(TList list) throws TException {
-    if (DEBUG) LOG.debug("writeListBegin("+toString(list)+")");
+    if (LOG.isDebugEnabled()) LOG.debug("writeListBegin("+toString(list)+")");
     currentProtocol.writeListBegin(list);
   }
 
@@ -539,7 +539,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeListEnd() throws TException {
-    if (DEBUG) LOG.debug("writeListEnd()");
+    LOG.debug("writeListEnd()");
     currentProtocol.writeListEnd();
   }
 
@@ -550,7 +550,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeSetBegin(TSet set) throws TException {
-    if (DEBUG) LOG.debug("writeSetBegin("+set+")");
+    LOG.debug("writeSetBegin({})", set);
     currentProtocol.writeSetBegin(set);
   }
 
@@ -560,7 +560,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeSetEnd() throws TException {
-    if (DEBUG) LOG.debug("writeSetEnd()");
+    LOG.debug("writeSetEnd()");
     currentProtocol.writeSetEnd();
   }
 
@@ -570,7 +570,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeBool(boolean b) throws TException {
-    if (DEBUG) LOG.debug("writeBool("+b+")");
+    LOG.debug("writeBool({})", b);
     currentProtocol.writeBool(b);
   }
 
@@ -580,7 +580,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeByte(byte b) throws TException {
-    if (DEBUG) LOG.debug("writeByte("+b+")");
+    LOG.debug("writeByte({})", b);
     currentProtocol.writeByte(b);
   }
 
@@ -590,7 +590,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeI16(short i16) throws TException {
-    if (DEBUG) LOG.debug("writeI16("+i16+")");
+    LOG.debug("writeI16({})", i16);
     currentProtocol.writeI16(i16);
   }
 
@@ -600,7 +600,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeI32(int i32) throws TException {
-    if (DEBUG) LOG.debug("writeI32("+i32+")");
+    LOG.debug("writeI32({})", i32);
     currentProtocol.writeI32(i32);
   }
 
@@ -610,7 +610,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeI64(long i64) throws TException {
-    if (DEBUG) LOG.debug("writeI64("+i64+")");
+    LOG.debug("writeI64({})", i64);
     currentProtocol.writeI64(i64);
   }
 
@@ -620,7 +620,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeDouble(double dub) throws TException {
-    if (DEBUG) LOG.debug("writeDouble("+dub+")");
+    LOG.debug("writeDouble({})", dub);
     currentProtocol.writeDouble(dub);
   }
 
@@ -630,7 +630,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeString(String str) throws TException {
-    if (DEBUG) LOG.debug("writeString("+str+")");
+    LOG.debug("writeString({})", str);
     currentProtocol.writeString(str);
   }
 
@@ -640,7 +640,7 @@ public class ParquetWriteProtocol extends ParquetProtocol {
    */
   @Override
   public void writeBinary(ByteBuffer buf) throws TException {
-    if (DEBUG) LOG.debug("writeBinary("+buf+")");
+    LOG.debug("writeBinary({})", buf);
     currentProtocol.writeBinary(buf);
   }
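
This file shows the dividing line the commit draws: calls whose arguments are cheap lose their guard and use {} placeholders, while writeStructBegin, writeMapBegin, and writeListBegin keep an explicit LOG.isDebugEnabled() check because they call a local toString(...) helper that does real formatting work before the logger is ever invoked. A hedged sketch of that rule of thumb (class and helper names are illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class GuardRuleSketch {
      private static final Logger LOG = LoggerFactory.getLogger(GuardRuleSketch.class);

      // Stand-in for the non-trivial toString(...) helper in ParquetWriteProtocol.
      private String expensiveToString(Object o) { return "formatted:" + o; }

      void write(Object field, Object struct) {
        // Cheap argument: no guard; nothing is formatted unless DEBUG is on.
        LOG.debug("writeFieldBegin({})", field);
        // Expensive argument: guard it, or expensiveToString runs even with DEBUG off.
        if (LOG.isDebugEnabled()) LOG.debug("writeStructBegin({})", expensiveToString(struct));
      }
    }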
 

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftMetaData.java
----------------------------------------------------------------------
diff --git a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftMetaData.java b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftMetaData.java
index a7628cc..f61c311 100644
--- a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftMetaData.java
+++ b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftMetaData.java
@@ -19,11 +19,12 @@
 package org.apache.parquet.thrift;
 import java.util.*;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.hadoop.BadConfigurationException;
 import org.apache.parquet.thrift.struct.ThriftType;
 import org.apache.parquet.thrift.struct.ThriftType.StructType;
 import org.apache.thrift.TBase;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  *
@@ -33,7 +34,7 @@ import org.apache.thrift.TBase;
  *
  */
 public class ThriftMetaData {
-  private static final Log LOG = Log.getLog(ThriftMetaData.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ThriftMetaData.class);
 
   private static final String THRIFT_CLASS = "thrift.class";
   private static final String THRIFT_DESCRIPTOR = "thrift.descriptor";

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftRecordConverter.java
----------------------------------------------------------------------
diff --git a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftRecordConverter.java b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftRecordConverter.java
index 3160d5f..0bc0455 100644
--- a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftRecordConverter.java
+++ b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftRecordConverter.java
@@ -34,7 +34,6 @@ import org.apache.thrift.protocol.TSet;
 import org.apache.thrift.protocol.TStruct;
 import org.apache.thrift.protocol.TType;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.Preconditions;
 import org.apache.parquet.io.ParquetDecodingException;
 import org.apache.parquet.io.api.Binary;
@@ -56,6 +55,8 @@ import org.apache.parquet.thrift.struct.ThriftType.MapType;
 import org.apache.parquet.thrift.struct.ThriftType.SetType;
 import org.apache.parquet.thrift.struct.ThriftType.StructType;
 import org.apache.parquet.thrift.struct.ThriftTypeID;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * converts the columnar events into a Thrift protocol.
@@ -66,7 +67,7 @@ import org.apache.parquet.thrift.struct.ThriftTypeID;
  */
 public class ThriftRecordConverter<T> extends RecordMaterializer<T> {
 
-  private static final Log LOG = Log.getLog(ThriftRecordConverter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ThriftRecordConverter.class);
 
   public static final String IGNORE_NULL_LIST_ELEMENTS =
       "parquet.thrift.ignore-null-elements";

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-thrift/src/main/java/org/apache/parquet/thrift/projection/StrictFieldProjectionFilter.java
----------------------------------------------------------------------
diff --git a/parquet-thrift/src/main/java/org/apache/parquet/thrift/projection/StrictFieldProjectionFilter.java b/parquet-thrift/src/main/java/org/apache/parquet/thrift/projection/StrictFieldProjectionFilter.java
index 645ae96..b048f16 100644
--- a/parquet-thrift/src/main/java/org/apache/parquet/thrift/projection/StrictFieldProjectionFilter.java
+++ b/parquet-thrift/src/main/java/org/apache/parquet/thrift/projection/StrictFieldProjectionFilter.java
@@ -21,9 +21,10 @@ package org.apache.parquet.thrift.projection;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.Strings;
 import org.apache.parquet.glob.WildcardPath;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Stricter Implementation of {@link FieldProjectionFilter}.
@@ -38,7 +39,7 @@ import org.apache.parquet.glob.WildcardPath;
  * throw when {@link #assertNoUnmatchedPatterns()} is called.
  */
 public class StrictFieldProjectionFilter implements FieldProjectionFilter {
-  private static final Log LOG = Log.getLog(FieldProjectionFilter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FieldProjectionFilter.class);
   private static final String GLOB_SEPARATOR = ";";
 
   // use a list instead of a Set, so we can detect overlapping patterns and

http://git-wip-us.apache.org/repos/asf/parquet-mr/blob/df9d8e41/parquet-thrift/src/test/java/org/apache/parquet/hadoop/thrift/TestInputOutputFormat.java
----------------------------------------------------------------------
diff --git a/parquet-thrift/src/test/java/org/apache/parquet/hadoop/thrift/TestInputOutputFormat.java b/parquet-thrift/src/test/java/org/apache/parquet/hadoop/thrift/TestInputOutputFormat.java
index 0835cdb..af8e60d 100644
--- a/parquet-thrift/src/test/java/org/apache/parquet/hadoop/thrift/TestInputOutputFormat.java
+++ b/parquet-thrift/src/test/java/org/apache/parquet/hadoop/thrift/TestInputOutputFormat.java
@@ -45,7 +45,6 @@ import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
 import org.apache.thrift.TBase;
 import org.junit.Test;
 
-import org.apache.parquet.Log;
 import org.apache.parquet.example.data.Group;
 import org.apache.parquet.hadoop.metadata.CompressionCodecName;
 import org.apache.parquet.thrift.test.compat.StructV1;
@@ -56,9 +55,11 @@ import com.twitter.data.proto.tutorial.thrift.AddressBook;
 import com.twitter.data.proto.tutorial.thrift.Name;
 import com.twitter.data.proto.tutorial.thrift.Person;
 import com.twitter.data.proto.tutorial.thrift.PhoneNumber;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestInputOutputFormat {
-  private static final Log LOG = Log.getLog(TestInputOutputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestInputOutputFormat.class);
 
   public static AddressBook nextAddressbook(int i) {
     final ArrayList<Person> persons = new ArrayList<Person>();
@@ -245,10 +246,10 @@ public class TestInputOutputFormat {
   public static void waitForJob(Job job) throws Exception {
     job.submit();
     while (!job.isComplete()) {
-      LOG.debug("waiting for job " + job.getJobName());
+      LOG.debug("waiting for job {}", job.getJobName());
       sleep(100);
     }
-    LOG.info("status for job " + job.getJobName() + ": " + (job.isSuccessful() ? "SUCCESS" : "FAILURE"));
+    LOG.info("status for job {}: {}", job.getJobName(), (job.isSuccessful() ? "SUCCESS" : "FAILURE"));
     if (!job.isSuccessful()) {
       throw new RuntimeException("job failed " + job.getJobName());
     }

