hive-commits mailing list archives

From: hashut...@apache.org
Subject: [04/14] hive git commit: HIVE-12237 : Use slf4j as logging facade
Date: Wed, 28 Oct 2015 15:45:52 GMT
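
All of the hunks in this message apply the same substitution across the ql parse, plan, ppd, and processors packages: the Apache Commons Logging (JCL) Log/LogFactory pair is replaced by the SLF4J facade. A minimal sketch of the before/after logger declaration, using a hypothetical class name (Example) that is not part of the patch:

// Before: Apache Commons Logging
//   import org.apache.commons.logging.Log;
//   import org.apache.commons.logging.LogFactory;
//   private static final Log LOG = LogFactory.getLog(Example.class);

// After: SLF4J facade
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Example {
  private static final Logger LOG = LoggerFactory.getLogger(Example.class);
}
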
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
index eeccc4b..d41253f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
@@ -27,8 +27,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
@@ -74,7 +74,7 @@ import org.apache.hadoop.hive.shims.ShimLoader;
 
 public class MapReduceCompiler extends TaskCompiler {
 
-  protected final Log LOG = LogFactory.getLog(MapReduceCompiler.class);
+  protected final Logger LOG = LoggerFactory.getLogger(MapReduceCompiler.class);
 
   public MapReduceCompiler() {
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java
index 1739fd2..a17696a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MetaDataExportListener.java
@@ -21,8 +21,8 @@ import java.io.IOException;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -44,7 +44,7 @@ import org.apache.hadoop.hive.ql.parse.ImportSemanticAnalyzer;
  * of the user performing the drop
  */
 public class MetaDataExportListener extends MetaStorePreEventListener {
-  public static final Log LOG = LogFactory.getLog(MetaDataExportListener.class);
+  public static final Logger LOG = LoggerFactory.getLogger(MetaDataExportListener.class);
 
   /** Configure the export listener */
   public MetaDataExportListener(Configuration config) {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
index e0cd398..2370ec0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
@@ -31,8 +31,8 @@ import java.util.Stack;
 import org.antlr.runtime.CommonToken;
 import org.antlr.runtime.tree.TreeWizard;
 import org.antlr.runtime.tree.TreeWizard.ContextVisitor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -102,7 +102,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
 public class PTFTranslator {
 
-  private static final Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.parse");
+  private static final Logger LOG = LoggerFactory.getLogger("org.apache.hadoop.hive.ql.parse");
 
   HiveConf hCfg;
   LeadLagInfo llInfo;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
index debd5ac..c33bb66 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
@@ -29,8 +29,8 @@ import org.antlr.runtime.TokenStream;
 import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.CommonTreeAdaptor;
 import org.antlr.runtime.tree.TreeAdaptor;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.Context;
 
 /**
@@ -39,7 +39,7 @@ import org.apache.hadoop.hive.ql.Context;
  */
 public class ParseDriver {
 
-  private static final Log LOG = LogFactory.getLog("hive.ql.parse.ParseDriver");
+  private static final Logger LOG = LoggerFactory.getLogger("hive.ql.parse.ParseDriver");
 
   /**
    * ANTLRNoCaseStringStream.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
index 16b4376..6f9948e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
@@ -22,8 +22,8 @@ import java.util.List;
 import java.util.Set;
 import java.util.Stack;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -51,7 +51,7 @@ import org.apache.hadoop.mapred.InputFormat;
  */
 public class ProcessAnalyzeTable implements NodeProcessor {
 
-  static final private Log LOG = LogFactory.getLog(ProcessAnalyzeTable.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(ProcessAnalyzeTable.class.getName());
 
   // shared plan utils for tez
   private GenTezUtils utils = null;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
index 0ddc221..f04b493 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
@@ -26,8 +26,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
@@ -39,7 +39,7 @@ import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
 
 public class QB {
 
-  private static final Log LOG = LogFactory.getLog("hive.ql.parse.QB");
+  private static final Logger LOG = LoggerFactory.getLogger("hive.ql.parse.QB");
 
   private final int numJoins = 0;
   private final int numGbys = 0;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java
index 36e65da..32aee48 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Implementation of the query block expression.
@@ -28,7 +28,7 @@ import org.apache.commons.logging.LogFactory;
 
 public class QBExpr {
 
-  private static final Log LOG = LogFactory.getLog("hive.ql.parse.QBExpr");
+  private static final Logger LOG = LoggerFactory.getLogger("hive.ql.parse.QBExpr");
 
   /**
    * Opcode.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java
index 33c2f18..2ae8daa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java
@@ -22,8 +22,8 @@ import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
@@ -51,7 +51,7 @@ public class QBMetaData {
   private final HashMap<String, DynamicPartitionCtx> aliasToDPCtx;
 
   @SuppressWarnings("unused")
-  private static final Log LOG = LogFactory.getLog(QBMetaData.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(QBMetaData.class.getName());
 
   public QBMetaData() {
     // Must be deterministic order map - see HIVE-8707

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
index 9072d7f..186c2a8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
@@ -28,8 +28,8 @@ import java.util.Map;
 import java.util.Set;
 
 import org.antlr.runtime.tree.Tree;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.AnalyzeRewriteContext;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec;
 
@@ -111,7 +111,7 @@ public class QBParseInfo {
 
 
   @SuppressWarnings("unused")
-  private static final Log LOG = LogFactory.getLog(QBParseInfo.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(QBParseInfo.class.getName());
 
   public QBParseInfo(String alias, boolean isSubQ) {
     aliasToSrc = new HashMap<String, ASTNode>();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
index 5190bda..891b1f7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
@@ -27,8 +27,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
 
@@ -54,7 +54,7 @@ public class RowResolver implements Serializable{
   // TODO: Refactor this and do in a more object oriented manner
   private boolean isExprResolver;
 
-  private static final Log LOG = LogFactory.getLog(RowResolver.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(RowResolver.class.getName());
 
   public RowResolver() {
     rowSchema = new RowSchema();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 8927800..70beff7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -2230,7 +2230,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
           if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
             joinTree.getFilters().get(0).add(joinCond);
           } else {
-            LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
+            LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg());
             joinTree.getFiltersForPushing().get(0).add(joinCond);
           }
         } else {
@@ -2319,7 +2319,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
           if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
             joinTree.getFilters().get(1).add(joinCond);
           } else {
-            LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
+            LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg());
             joinTree.getFiltersForPushing().get(1).add(joinCond);
           }
         } else {
@@ -2339,7 +2339,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
           joinTree.getFilters().get(0).add(joinCond);
         } else {
-          LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
+          LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg());
           joinTree.getFiltersForPushing().get(0).add(joinCond);
         }
       } else {
@@ -2351,7 +2351,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
           joinTree.getFilters().get(1).add(joinCond);
         } else {
-          LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
+          LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg());
           joinTree.getFiltersForPushing().get(1).add(joinCond);
         }
       } else {
@@ -2501,7 +2501,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
           if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
             joinTree.getFilters().get(0).add(joinCond);
           } else {
-            LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
+            LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg());
             joinTree.getFiltersForPushing().get(0).add(joinCond);
           }
         } else {
@@ -2513,7 +2513,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
           if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
             joinTree.getFilters().get(1).add(joinCond);
           } else {
-            LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
+            LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS.getErrorCodedMsg());
             joinTree.getFiltersForPushing().get(1).add(joinCond);
           }
         } else {
@@ -8452,7 +8452,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
           // for outer joins, it should not exceed 16 aliases (short type)
           if (!node.getNoOuterJoin() || !target.getNoOuterJoin()) {
             if (node.getRightAliases().length + target.getRightAliases().length + 1 > 16) {
-              LOG.info(ErrorMsg.JOINNODE_OUTERJOIN_MORETHAN_16);
+              LOG.info(ErrorMsg.JOINNODE_OUTERJOIN_MORETHAN_16.getErrorCodedMsg());
               continueScanning = continueJoinMerge();
               continue;
             }
@@ -10543,7 +10543,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
           Table tbl = usedp.getTable();
 
           LOG.debug("validated " + usedp.getName());
-          LOG.debug(usedp.getTable());
+          LOG.debug(usedp.getTable().getTableName());
           conflictingArchive = ArchiveUtils
               .conflictingArchiveNameOrNull(db, tbl, usedp.getSpec());
         } catch (HiveException e) {
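
Beyond the import swap, the SemanticAnalyzer hunks above also adapt call sites: Commons Logging's warn(Object)/info(Object) accepted the ErrorMsg enum directly, while SLF4J's Logger methods take String messages, so the enum is now rendered via getErrorCodedMsg(). A self-contained illustration with stand-in names (WarnExample and Msg are hypothetical, not from the patch):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class WarnExample {
  enum Msg { OUTERJOIN_USES_FILTERS }

  private static final Logger LOG = LoggerFactory.getLogger(WarnExample.class);

  void warnOnFilter() {
    // Log.warn(Object) would have accepted the enum as-is;
    // Logger.warn(String) requires rendering the message explicitly.
    LOG.warn(Msg.OUTERJOIN_USES_FILTERS.toString());
  }
}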

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java
index cc0a7d1..a2042dc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TableAccessAnalyzer.java
@@ -23,8 +23,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Stack;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.GroupByOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
@@ -58,7 +58,7 @@ import org.apache.hadoop.hive.ql.plan.SelectDesc;
  * for improvement through bucketing.
  */
 public class TableAccessAnalyzer {
-  private static final Log LOG = LogFactory.getLog(TableAccessAnalyzer.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(TableAccessAnalyzer.class.getName());
   private final ParseContext pGraphContext;
 
   public TableAccessAnalyzer() {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
index 81d02da..a8f9f50 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -26,8 +26,8 @@ import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -69,7 +69,7 @@ import com.google.common.collect.Interners;
  */
 public abstract class TaskCompiler {
 
-  protected final Log LOG = LogFactory.getLog(TaskCompiler.class);
+  protected final Logger LOG = LoggerFactory.getLogger(TaskCompiler.class);
 
   protected Hive db;
   protected LogHelper console;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
index a60527b..eca40be 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
@@ -30,8 +30,8 @@ import java.util.Set;
 import java.util.Stack;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.Context;
@@ -93,7 +93,7 @@ import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
  */
 public class TezCompiler extends TaskCompiler {
 
-  protected final Log LOG = LogFactory.getLog(TezCompiler.class);
+  protected final Logger LOG = LoggerFactory.getLogger(TezCompiler.class);
 
   public TezCompiler() {
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
index 8ad28be..de1c043 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 
 /**
@@ -27,7 +27,7 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
  * phase in query compilation.
  */
 public class TypeCheckCtx implements NodeProcessorCtx {
-  protected static final Log LOG = LogFactory.getLog(TypeCheckCtx.class);
+  protected static final Logger LOG = LoggerFactory.getLogger(TypeCheckCtx.class);
 
   /**
    * The row resolver of the previous operator. This field is used to generate
@@ -155,7 +155,7 @@ public class TypeCheckCtx implements NodeProcessorCtx {
    */
   public void setError(String error, ASTNode errorSrcNode) {
     if (LOG.isDebugEnabled()) {
-      // Log the callstack from which the error has been set.
+      // Logger the callstack from which the error has been set.
       LOG.debug("Setting error: [" + error + "] from "
           + ((errorSrcNode == null) ? "null" : errorSrcNode.toStringTree()), new Exception());
     }
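
The debug call in the hunk above still builds its message by string concatenation inside an isDebugEnabled() guard. SLF4J also offers {} placeholders that defer formatting until the level is enabled; a possible equivalent of that call, shown only as an illustration of the facade and not as part of this patch:

// With SLF4J 1.6+, a trailing Throwable beyond the placeholders is logged
// with its stack trace, matching the original new Exception(). The
// isDebugEnabled() guard still helps when computing an argument (here,
// toStringTree()) is itself expensive.
LOG.debug("Setting error: [{}] from {}",
    error,
    (errorSrcNode == null) ? "null" : errorSrcNode.toStringTree(),
    new Exception());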

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index ab5d006..3a6535b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -32,8 +32,8 @@ import java.util.Set;
 import java.util.Stack;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
@@ -93,7 +93,7 @@ import com.google.common.collect.Lists;
  */
 public class TypeCheckProcFactory {
 
-  protected static final Log LOG = LogFactory.getLog(TypeCheckProcFactory.class
+  protected static final Logger LOG = LoggerFactory.getLogger(TypeCheckProcFactory.class
       .getName());
 
   protected TypeCheckProcFactory() {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/UnionProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/UnionProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/UnionProcessor.java
index 5ee8b77..1ee4ea9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/UnionProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/UnionProcessor.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.parse;
 
 import java.util.Stack;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
@@ -34,7 +34,7 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
  */
 public class UnionProcessor implements NodeProcessor {
 
-  static final private Log LOG = LogFactory.getLog(UnionProcessor.class.getName());
+  static final private Logger LOG = LoggerFactory.getLogger(UnionProcessor.class.getName());
 
   @Override
   public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
index 1c0b79d..e87701f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
@@ -28,8 +28,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -70,7 +70,7 @@ import org.apache.hadoop.hive.ql.plan.TableDesc;
  * Cloned from GenTezUtils.
  */
 public class GenSparkUtils {
-  private static final Log LOG = LogFactory.getLog(GenSparkUtils.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(GenSparkUtils.class.getName());
 
   // sequence number is used to name vertices (e.g.: Map 1, Reduce 14, ...)
   private int sequenceNumber = 0;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWork.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWork.java
index 3dd6d92..7a7b558 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkWork.java
@@ -25,8 +25,8 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Stack;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.ql.exec.HashTableDummyOperator;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
@@ -59,7 +59,7 @@ import com.google.common.base.Preconditions;
  * Cloned from GenTezWork.
  */
 public class GenSparkWork implements NodeProcessor {
-  static final private Log LOG = LogFactory.getLog(GenSparkWork.class.getName());
+  static final private Logger LOG = LoggerFactory.getLogger(GenSparkWork.class.getName());
 
   // instance of shared utils
   private GenSparkUtils utils = null;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
index 9ec7fd6..7e0e137 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
@@ -26,8 +26,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.Stack;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
@@ -97,7 +97,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 public class SparkCompiler extends TaskCompiler {
   private static final String CLASS_NAME = SparkCompiler.class.getName();
   private static final PerfLogger PERF_LOGGER = SessionState.getPerfLogger();
-  private static final Log LOGGER = LogFactory.getLog(SparkCompiler.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(SparkCompiler.class);
 
   public SparkCompiler() {
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java
index 7104f89..4cc127a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.parse.spark;
 
 import java.util.Stack;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
  * Cloned from tez's FileSinkProcessor.
  */
 public class SparkFileSinkProcessor implements NodeProcessor {
-  private static final Log LOGGER = LogFactory.getLog(SparkFileSinkProcessor.class.getName());
+  private static final Logger LOGGER = LoggerFactory.getLogger(SparkFileSinkProcessor.class.getName());
 
   /*
    * (non-Javadoc)
@@ -54,4 +54,4 @@ public class SparkFileSinkProcessor implements NodeProcessor {
     return true;
   }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkPartitionPruningSinkOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkPartitionPruningSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkPartitionPruningSinkOperator.java
index cd1301d..fa8a53a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkPartitionPruningSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkPartitionPruningSinkOperator.java
@@ -24,8 +24,8 @@ import java.io.ObjectOutputStream;
 import java.util.Collection;
 import java.util.concurrent.Future;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -51,7 +51,7 @@ public class SparkPartitionPruningSinkOperator extends Operator<SparkPartitionPr
   @SuppressWarnings("deprecation")
   protected transient Serializer serializer;
   protected transient DataOutputBuffer buffer;
-  protected static final Log LOG = LogFactory.getLog(SparkPartitionPruningSinkOperator.class);
+  protected static final Logger LOG = LoggerFactory.getLogger(SparkPartitionPruningSinkOperator.class);
 
   @SuppressWarnings("deprecation")
   public void initializeOp(Configuration hconf) throws HiveException {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
index 7ab4e7a..e4e7c98 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkProcessAnalyzeTable.java
@@ -22,8 +22,8 @@ import java.util.List;
 import java.util.Set;
 import java.util.Stack;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -57,7 +57,7 @@ import com.google.common.base.Preconditions;
  * Cloned from Tez ProcessAnalyzeTable.
  */
 public class SparkProcessAnalyzeTable implements NodeProcessor {
-  private static final Log LOGGER = LogFactory.getLog(SparkProcessAnalyzeTable.class.getName());
+  private static final Logger LOGGER = LoggerFactory.getLogger(SparkProcessAnalyzeTable.class.getName());
 
   // shared plan utils for spark
   private GenSparkUtils utils = null;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
index 3bc704f..c4bdaeb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
@@ -26,8 +26,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.Stack;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.HashTableDummyOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.mapred.JobConf;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
  */
 @SuppressWarnings({"serial"})
 public abstract class BaseWork extends AbstractOperatorDesc {
-  static final private Log LOG = LogFactory.getLog(BaseWork.class);
+  static final private Logger LOG = LoggerFactory.getLogger(BaseWork.class);
 
   // dummyOps is a reference to all the HashTableDummy operators in the
   // plan. These have to be separately initialized when we setup a task.
@@ -108,7 +108,7 @@ public abstract class BaseWork extends AbstractOperatorDesc {
 
   public abstract void replaceRoots(Map<Operator<?>, Operator<?>> replacementMap);
 
-  public abstract Set<Operator<?>> getAllRootOperators();
+  public abstract Set<Operator<? extends OperatorDesc>> getAllRootOperators();
 
   public Set<Operator<?>> getAllOperators() {
 
@@ -134,7 +134,7 @@ public abstract class BaseWork extends AbstractOperatorDesc {
    * Returns a set containing all leaf operators from the operator tree in this work.
    * @return a set containing all leaf operators in this operator tree.
    */
-  public Set<Operator<?>> getAllLeafOperators() {
+  public Set<Operator<? extends OperatorDesc>> getAllLeafOperators() {
     Set<Operator<?>> returnSet = new LinkedHashSet<Operator<?>>();
     Set<Operator<?>> opSet = getAllRootOperators();
     Stack<Operator<?>> opStack = new Stack<Operator<?>>();

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java
index 1da7f85..0d04e84 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java
@@ -25,8 +25,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hive.ql.exec.Utilities;
 public class ConditionalResolverCommonJoin implements ConditionalResolver, Serializable {
 
   private static final long serialVersionUID = 1L;
-  private static final Log LOG = LogFactory.getLog(ConditionalResolverCommonJoin.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ConditionalResolverCommonJoin.class);
 
   /**
    * ConditionalResolverSkewJoinCtx.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
index 3e4c9a3..3f38f74 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
@@ -24,8 +24,8 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
@@ -50,7 +50,7 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 @Explain(displayName = "Create Table", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class CreateTableDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
-  private static Log LOG = LogFactory.getLog(CreateTableDesc.class);
+  private static Logger LOG = LoggerFactory.getLogger(CreateTableDesc.class);
   String databaseName;
   String tableName;
   boolean isExternal;
@@ -486,7 +486,7 @@ public class CreateTableDesc extends DDLDesc implements Serializable {
         try {
           pti = TypeInfoFactory.getPrimitiveTypeInfo(fs.getType());
         } catch (Exception err) {
-          LOG.error(err);
+          LOG.error("Failed to get type info", err);
         }
         if(null == pti){
           throw new SemanticException(ErrorMsg.PARTITION_COLUMN_NON_PRIMITIVE.getMsg() + " Found "
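
The CreateTableDesc hunk above shows the same API narrowing for exceptions: Commons Logging permitted error(Object), so LOG.error(err) compiled, while SLF4J requires a String message with the Throwable passed as a separate argument. A minimal self-contained sketch (ErrorExample and riskyCall are hypothetical, not from the patch):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class ErrorExample {
  private static final Logger LOG = LoggerFactory.getLogger(ErrorExample.class);

  void work() {
    try {
      riskyCall();
    } catch (Exception err) {
      // error(String msg, Throwable t) logs the message plus the stack trace;
      // error(err) alone does not compile against the SLF4J Logger interface.
      LOG.error("Failed to get type info", err);
    }
  }

  private void riskyCall() throws Exception {
    throw new Exception("boom");
  }
}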

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
index b5d2ddf..fc175b9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
@@ -24,8 +24,8 @@ import java.util.Arrays;
 import java.util.List;
 
 import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -51,8 +51,8 @@ public class ExprNodeGenericFuncDesc extends ExprNodeDesc implements
 
   private static final long serialVersionUID = 1L;
 
-  private static final Log LOG = LogFactory
-      .getLog(ExprNodeGenericFuncDesc.class.getName());
+  private static final Logger LOG = LoggerFactory
+      .getLogger(ExprNodeGenericFuncDesc.class.getName());
 
   /**
    * In case genericUDF is Serializable, we will serialize the object.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
index 0dc2fbd..87c15a2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
@@ -30,8 +30,8 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -63,7 +63,7 @@ import com.google.common.collect.Interner;
 @SuppressWarnings({"serial", "deprecation"})
 public class MapWork extends BaseWork {
 
-  private static final Log LOG = LogFactory.getLog(MapWork.class);
+  private static final Logger LOG = LoggerFactory.getLogger(MapWork.class);
 
   // use LinkedHashMap to make sure the iteration order is
   // deterministic, to ease testing

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
index 4461a1b..3ec3b1f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
@@ -19,8 +19,8 @@
 package org.apache.hadoop.hive.ql.plan;
 
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.exec.PTFUtils;
 import org.apache.hadoop.hive.ql.parse.LeadLagInfo;
@@ -39,7 +39,7 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class PTFDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
   @SuppressWarnings("unused")
-  private static final Log LOG = LogFactory.getLog(PTFDesc.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(PTFDesc.class.getName());
 
   PartitionedTableFunctionDef funcDef;
   LeadLagInfo llInfo;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index f17c063..3bdd3e7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -29,8 +29,8 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -81,7 +81,7 @@ import org.apache.hadoop.mapred.TextInputFormat;
  */
 public final class PlanUtils {
 
-  protected static final Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.plan.PlanUtils");
+  protected static final Logger LOG = LoggerFactory.getLogger("org.apache.hadoop.hive.ql.plan.PlanUtils");
 
   private static long countForMapJoinDumpFilePrefix = 0;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
index 4fed49e..615739e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
@@ -22,8 +22,8 @@ import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
@@ -119,7 +119,7 @@ public class ReduceSinkDesc extends AbstractOperatorDesc {
   // used by spark mode to decide whether global order is needed
   private transient boolean hasOrderBy = false;
 
-  private static transient Log LOG = LogFactory.getLog(ReduceSinkDesc.class);
+  private static transient Logger LOG = LoggerFactory.getLogger(ReduceSinkDesc.class);
   public ReduceSinkDesc() {
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
index 8d5f77c..0222c23 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
@@ -26,8 +26,8 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorUtils;
@@ -60,7 +60,7 @@ public class ReduceWork extends BaseWork {
     super(name);
   }
 
-  private static transient final Log LOG = LogFactory.getLog(ReduceWork.class);
+  private static transient final Logger LOG = LoggerFactory.getLogger(ReduceWork.class);
 
   // schema of the map-reduce 'key' object - this is homogeneous
   private TableDesc keyDesc;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java
index 17c5ad7..8b82c66 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java
@@ -32,8 +32,8 @@ import java.util.Set;
 
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.commons.lang3.tuple.Pair;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.plan.TezEdgeProperty.EdgeType;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -65,7 +65,7 @@ public class TezWork extends AbstractOperatorDesc {
     }
   }
 
-  private static transient final Log LOG = LogFactory.getLog(TezWork.class);
+  private static transient final Logger LOG = LoggerFactory.getLogger(TezWork.class);
 
   private static int counter;
   private final String name;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
index fca671c..d53a5f4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
@@ -24,8 +24,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -52,7 +52,7 @@ public class ExprWalkerInfo implements NodeProcessorCtx {
 
   }
 
-  protected static final Log LOG = LogFactory.getLog(OpProcFactory.class.getName());
+  protected static final Logger LOG = LoggerFactory.getLogger(OpProcFactory.class.getName());
   private Operator<? extends OperatorDesc> op = null;
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
index 64efbdd..9bd1847 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
@@ -23,8 +23,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Stack;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -54,7 +54,7 @@ import org.apache.hadoop.hive.ql.ppd.ExprWalkerInfo.ExprInfo;
  */
 public final class ExprWalkerProcFactory {
 
-  private static final Log LOG = LogFactory.getLog(ExprWalkerProcFactory.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(ExprWalkerProcFactory.class.getName());
 
   /**
    * ColumnExprProcessor.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
index 8566374..5b85c93 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
@@ -28,8 +28,8 @@ import java.util.Map.Entry;
 import java.util.Set;
 import java.util.Stack;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
@@ -90,7 +90,7 @@ import org.apache.hadoop.mapred.JobConf;
  */
 public final class OpProcFactory {
 
-  protected static final Log LOG = LogFactory.getLog(OpProcFactory.class
+  protected static final Logger LOG = LoggerFactory.getLogger(OpProcFactory.class
     .getName());
 
   private static ExprWalkerInfo getChildWalkerInfo(Operator<?> current, OpWalkerInfo owi) {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
index 7f26f0f..2312798 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
@@ -21,8 +21,8 @@ import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.CommonJoinOperator;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator;
@@ -81,7 +81,7 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
  */
 public class PredicatePushDown implements Transform {
 
-  private static final Log LOG = LogFactory.getLog(PredicatePushDown.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PredicatePushDown.class);
   private ParseContext pGraphContext;
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java
index 44159c5..7aa91f2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/SyntheticJoinPredicate.java
@@ -26,8 +26,8 @@ import java.util.Map;
 import java.util.Set;
 import java.util.Stack;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.exec.CommonJoinOperator;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -62,7 +62,7 @@ import org.apache.hadoop.hive.ql.plan.OperatorDesc;
  */
 public class SyntheticJoinPredicate implements Transform {
 
-  private static transient Log LOG = LogFactory.getLog(SyntheticJoinPredicate.class.getName());
+  private static transient Logger LOG = LoggerFactory.getLogger(SyntheticJoinPredicate.class.getName());
 
   @Override
   public ParseContext transform(ParseContext pctx) throws SemanticException {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
index d2ac993..e1edcaf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
@@ -22,13 +22,13 @@ import java.util.Arrays;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveVariableSource;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * AddResourceProcessor.
@@ -36,7 +36,7 @@ import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
  */
 public class AddResourceProcessor implements CommandProcessor {
 
-  public static final Log LOG = LogFactory.getLog(AddResourceProcessor.class
+  public static final Logger LOG = LoggerFactory.getLogger(AddResourceProcessor.class
       .getName());
   public static final LogHelper console = new LogHelper(LOG);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
index a2656a7..c343b0e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
@@ -27,8 +27,8 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.metadata.*;
@@ -103,7 +103,7 @@ public final class CommandProcessorFactory {
     }
   }
 
-  static Log LOG = LogFactory.getLog(CommandProcessorFactory.class);
+  static Logger LOG = LoggerFactory.getLogger(CommandProcessorFactory.class);
   public static CommandProcessor get(String[] cmd, HiveConf conf)
       throws SQLException {
     CommandProcessor result = getForHiveCommand(cmd, conf);

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandUtil.java
index ffe9c0b..d98b30c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandUtil.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hive.ql.processors;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
@@ -34,7 +34,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import com.google.common.base.Joiner;
 
 class CommandUtil {
-  public static final Log LOG = LogFactory.getLog(CommandUtil.class);
+  public static final Logger LOG = LoggerFactory.getLogger(CommandUtil.class);
 
   /**
    * Authorize command of given type and arguments

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
index 7b79f64..6981344 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
@@ -31,8 +31,6 @@ import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.commons.compress.archivers.jar.JarArchiveEntry;
 import org.apache.commons.compress.archivers.jar.JarArchiveOutputStream;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveVariableSource;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
@@ -45,6 +43,8 @@ import org.apache.tools.ant.BuildException;
 import org.apache.tools.ant.Project;
 import org.apache.tools.ant.types.Path;
 import org.codehaus.groovy.ant.Groovyc;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.io.Files;
@@ -60,7 +60,7 @@ import com.google.common.io.Files;
  */
 public class CompileProcessor implements CommandProcessor {
 
-  public static final Log LOG = LogFactory.getLog(CompileProcessor.class.getName());
+  public static final Logger LOG = LoggerFactory.getLogger(CompileProcessor.class.getName());
   public static final LogHelper console = new LogHelper(LOG);
   public static final String IO_TMP_DIR = "java.io.tmpdir";
   public static final String GROOVY = "GROOVY";

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java
index 5eaadbb..1acdc95 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/CryptoProcessor.java
@@ -24,8 +24,8 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
@@ -40,7 +40,7 @@ import java.util.Arrays;
  * only by Hive unit & queries tests.
  */
 public class CryptoProcessor implements CommandProcessor {
-  public static final Log LOG = LogFactory.getLog(CryptoProcessor.class.getName());
+  public static final Logger LOG = LoggerFactory.getLogger(CryptoProcessor.class.getName());
 
   private HadoopShims.HdfsEncryptionShim encryptionShim;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
index 736fa9c..d34f253 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
@@ -22,13 +22,13 @@ import java.util.Arrays;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveVariableSource;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * DeleteResourceProcessor.
@@ -36,7 +36,7 @@ import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
  */
 public class DeleteResourceProcessor implements CommandProcessor {
 
-  public static final Log LOG = LogFactory.getLog(DeleteResourceProcessor.class.getName());
+  public static final Logger LOG = LoggerFactory.getLogger(DeleteResourceProcessor.class.getName());
   public static final LogHelper console = new LogHelper(LOG);
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
index c3d5f81..3899d2c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
@@ -22,8 +22,8 @@ import java.io.PrintStream;
 import java.util.Arrays;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.hive.conf.HiveVariableSource;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
  */
 public class DfsProcessor implements CommandProcessor {
 
-  public static final Log LOG = LogFactory.getLog(DfsProcessor.class.getName());
+  public static final Logger LOG = LoggerFactory.getLogger(DfsProcessor.class.getName());
   public static final LogHelper console = new LogHelper(LOG);
   public static final String DFS_RESULT_HEADER = "DFS Output";
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java
index b84c9dd..7a59833 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.processors;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -29,7 +29,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
  * used for reload auxiliary and jars without restarting hive server2
  */
 public class ReloadProcessor implements CommandProcessor{
-  private static final Log LOG = LogFactory.getLog(ReloadProcessor.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ReloadProcessor.class);
 
   @Override
   public void init() {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
index 01fb748..82e7fc5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
@@ -23,8 +23,8 @@ import java.util.List;
 
 import com.google.common.base.Function;
 import com.google.common.collect.Iterators;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -65,7 +65,7 @@ import org.apache.hadoop.hive.ql.security.HiveMetastoreAuthenticationProvider;
 @Private
 public class AuthorizationPreEventListener extends MetaStorePreEventListener {
 
-  public static final Log LOG = LogFactory.getLog(
+  public static final Logger LOG = LoggerFactory.getLogger(
       AuthorizationPreEventListener.class);
 
   private static final ThreadLocal<Configuration> tConfig = new ThreadLocal<Configuration>() {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
index 25c25da..a1299a4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.security.authorization;
 
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
 import org.apache.hadoop.hive.metastore.api.Database;
@@ -102,7 +102,7 @@ public abstract class HiveAuthorizationProviderBase implements
 
   private Configuration conf;
 
-  public static final Log LOG = LogFactory.getLog(
+  public static final Logger LOG = LoggerFactory.getLogger(
       HiveAuthorizationProvider.class);
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
index 89e3513..7992a70 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
@@ -27,8 +27,8 @@ import java.util.List;
 
 import javax.security.auth.login.LoginException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -69,7 +69,7 @@ public class StorageBasedAuthorizationProvider extends HiveAuthorizationProvider
   private Warehouse wh;
   private boolean isRunFromMetaStore = false;
 
-  private static Log LOG = LogFactory.getLog(StorageBasedAuthorizationProvider.class);
+  private static Logger LOG = LoggerFactory.getLogger(StorageBasedAuthorizationProvider.class);
 
   /**
    * Make sure that the warehouse variable is set up properly.

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java
index 24322d0..6bad99b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.security.authorization.plugin;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl;
@@ -36,7 +36,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 @Private
 public class AuthorizationMetaStoreFilterHook extends DefaultMetaStoreFilterHookImpl {
 
-  public static final Log LOG = LogFactory.getLog(AuthorizationMetaStoreFilterHook.class);
+  public static final Logger LOG = LoggerFactory.getLogger(AuthorizationMetaStoreFilterHook.class);
 
   public AuthorizationMetaStoreFilterHook(HiveConf conf) {
     super(conf);
@@ -78,13 +78,13 @@ public class AuthorizationMetaStoreFilterHook extends DefaultMetaStoreFilterHook
     try {
       return ss.getAuthorizerV2().filterListCmdObjects(listObjs, authzContextBuilder.build());
     } catch (HiveAuthzPluginException e) {
-      LOG.error(e);
+      LOG.error("Authorization error", e);
       throw new MetaException(e.getMessage());
     } catch (HiveAccessControlException e) {
       // authorization error is not really expected in a filter call
       // the impl should have just filtered out everything. A checkPrivileges call
       // would have already been made to authorize this action
-      LOG.error(e);
+      LOG.error("AccessControlException", e);
       throw new MetaException(e.getMessage());
     }
   }

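The two call-site changes in this hunk follow from an API difference rather than a rename: commons-logging's Log.error(Object) accepted a bare Throwable, but slf4j's Logger has no single-Throwable overload, so each call gains a message string with the exception passed as the trailing argument. A small hedged sketch of that pattern (class and message text are illustrative):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ThrowableLoggingExample {
  private static final Logger LOG = LoggerFactory.getLogger(ThrowableLoggingExample.class);

  void filter() {
    try {
      throw new IllegalStateException("unexpected");
    } catch (IllegalStateException e) {
      // commons-logging allowed LOG.error(e); slf4j requires a message, and
      // passing the exception as the last argument preserves the stack trace
      LOG.error("Authorization error", e);
    }
  }
}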
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java
index b832fc8..26e3a2c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd;
 
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObje
  */
 public class DummyHiveAuthorizationValidator implements HiveAuthorizationValidator {
 
-  public static final Log LOG = LogFactory.getLog(DummyHiveAuthorizationValidator.class);
+  public static final Logger LOG = LoggerFactory.getLogger(DummyHiveAuthorizationValidator.class);
 
   @Override
   public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
index 329781c..b6b2699 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
@@ -29,8 +29,8 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -68,7 +68,7 @@ public class SQLAuthorizationUtils {
   private static final String[] SUPPORTED_PRIVS = { "INSERT", "UPDATE", "DELETE", "SELECT" };
   private static final Set<String> SUPPORTED_PRIVS_SET = new HashSet<String>(
       Arrays.asList(SUPPORTED_PRIVS));
-  public static final Log LOG = LogFactory.getLog(SQLAuthorizationUtils.class);
+  public static final Logger LOG = LoggerFactory.getLogger(SQLAuthorizationUtils.class);
 
   /**
    * Create thrift privileges bag

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
index 2f6e26b..d5c3a1a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
@@ -25,8 +25,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -85,7 +85,7 @@ public class SQLStdHiveAccessController implements HiveAccessController {
   private final String HAS_ADMIN_PRIV_MSG = "grantor need to have ADMIN OPTION on role being"
       + " granted and have it as a current role for this action.";
   private final HiveAuthzSessionContext sessionCtx;
-  public static final Log LOG = LogFactory.getLog(SQLStdHiveAccessController.class);
+  public static final Logger LOG = LoggerFactory.getLogger(SQLStdHiveAccessController.class);
 
   public SQLStdHiveAccessController(HiveMetastoreClientFactory metastoreClientFactory, HiveConf conf,
       HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
index 31357c4..ee57f69 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
@@ -21,8 +21,8 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -48,7 +48,7 @@ public class SQLStdHiveAuthorizationValidator implements HiveAuthorizationValida
   private final HiveAuthenticationProvider authenticator;
   private final SQLStdHiveAccessControllerWrapper privController;
   private final HiveAuthzSessionContext ctx;
-  public static final Log LOG = LogFactory.getLog(SQLStdHiveAuthorizationValidator.class);
+  public static final Logger LOG = LoggerFactory.getLogger(SQLStdHiveAuthorizationValidator.class);
 
   public SQLStdHiveAuthorizationValidator(HiveMetastoreClientFactory metastoreClientFactory,
       HiveConf conf, HiveAuthenticationProvider authenticator,

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java
index b20e975..7289426 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java
@@ -28,11 +28,11 @@ import java.util.List;
 import java.util.Map;
 import java.io.File;
 import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.slf4j.LoggerFactory;
+
 import groovy.grape.Grape;
-import groovy.grape.GrapeIvy;
 import groovy.lang.GroovyClassLoader;
 
 
@@ -41,7 +41,7 @@ public class DependencyResolver {
   private static final String HIVE_HOME = "HIVE_HOME";
   private static final String HIVE_CONF_DIR = "HIVE_CONF_DIR";
   private String ivysettingsPath;
-  private static LogHelper _console = new LogHelper(LogFactory.getLog("DependencyResolver"));
+  private static LogHelper _console = new LogHelper(LoggerFactory.getLogger("DependencyResolver"));
 
   public DependencyResolver() {
 

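LoggerFactory.getLogger(...) also accepts a plain String, so loggers keyed by a name rather than a class, as in the hunk above, migrate one-for-one. A brief sketch (the logger name matches the string used in the patch; the message is illustrative):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class NamedLoggerExample {
  // Name-based loggers can be configured independently of any class hierarchy
  private static final Logger CONSOLE_LOG = LoggerFactory.getLogger("DependencyResolver");

  public static void main(String[] args) {
    CONSOLE_LOG.warn("using default ivy settings"); // illustrative message only
  }
}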
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java b/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
index b642e27..fb7dda5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
@@ -18,10 +18,10 @@
 package org.apache.hadoop.hive.ql.session;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.io.IOUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.*;
 import java.sql.SQLException;
@@ -33,7 +33,7 @@ import java.util.List;
  * for accessing, reading, writing, and removing the file.
  */
 public class OperationLog {
-  private static final Log LOG = LogFactory.getLog(OperationLog.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(OperationLog.class.getName());
 
   private final String operationName;
   private final LogFile logFile;
@@ -125,9 +125,9 @@ public class OperationLog {
    * Wrapper for read/write the operation log file
    */
   private class LogFile {
-    private File file;
+    private final File file;
     private BufferedReader in;
-    private PrintStream out;
+    private final PrintStream out;
     private volatile boolean isRemoved;
 
     LogFile(File file) throws FileNotFoundException {
@@ -169,7 +169,7 @@ public class OperationLog {
 
     private void resetIn() {
       if (in != null) {
-        IOUtils.cleanup(LOG, in);
+        IOUtils.closeStream(in);
         in = null;
       }
     }

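The resetIn() change above is a knock-on effect of the logging switch: Hadoop's IOUtils.cleanup(Log, Closeable...) is declared against a commons-logging Log, so it cannot take an slf4j Logger, and the patch substitutes IOUtils.closeStream(...), which closes the stream and swallows any IOException. A hedged sketch of that idiom (the file path is illustrative):

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

import org.apache.hadoop.io.IOUtils;

public class QuietCloseExample {
  public static void main(String[] args) throws IOException {
    BufferedReader in = new BufferedReader(new FileReader("/tmp/example.log")); // illustrative path
    try {
      System.out.println(in.readLine());
    } finally {
      // Previously IOUtils.cleanup(LOG, in); closeStream() needs no logger
      // and quietly ignores close failures.
      IOUtils.closeStream(in);
    }
  }
}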
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 34ec4d8..ff875df 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -42,8 +42,8 @@ import java.util.concurrent.CancellationException;
 import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -99,7 +99,7 @@ import com.google.common.base.Preconditions;
  * configuration information
  */
 public class SessionState {
-  private static final Log LOG = LogFactory.getLog(SessionState.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SessionState.class);
 
   private static final String TMP_PREFIX = "_tmp_space.db";
   private static final String LOCAL_SESSION_PATH_KEY = "_hive.local.session.path";
@@ -265,9 +265,9 @@ public class SessionState {
    */
   private Timestamp queryCurrentTimestamp;
 
-  private ResourceMaps resourceMaps;
+  private final ResourceMaps resourceMaps;
 
-  private DependencyResolver dependencyResolver;
+  private final DependencyResolver dependencyResolver;
   /**
    * Get the lineage state stored in this session.
    *
@@ -934,14 +934,14 @@ public class SessionState {
    */
   public static class LogHelper {
 
-    protected Log LOG;
+    protected Logger LOG;
     protected boolean isSilent;
 
-    public LogHelper(Log LOG) {
+    public LogHelper(Logger LOG) {
       this(LOG, false);
     }
 
-    public LogHelper(Log LOG, boolean isSilent) {
+    public LogHelper(Logger LOG, boolean isSilent) {
       this.LOG = LOG;
       this.isSilent = isSilent;
     }
@@ -1013,7 +1013,7 @@ public class SessionState {
    */
   public static LogHelper getConsole() {
     if (_console == null) {
-      Log LOG = LogFactory.getLog("SessionState");
+      Logger LOG = LoggerFactory.getLogger("SessionState");
       _console = new LogHelper(LOG);
     }
     return _console;
@@ -1543,7 +1543,7 @@ public class SessionState {
         }
       }
     } catch (Exception e) {
-      LOG.info(e);
+      LOG.info("Failed to remove classloaders from DataNucleus ", e);
     }
   }
 

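SessionState.LogHelper now wraps an slf4j Logger instead of a commons-logging Log, so every caller constructing a console helper passes a Logger, and the LOG.info(e) call gains a message for the same reason as the error(...) calls earlier. A trimmed-down analogue of the wrapper change, offered only as a sketch and not Hive's actual class:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ConsoleHelperSketch {
  private final Logger log;
  private final boolean isSilent;

  public ConsoleHelperSketch(Logger log, boolean isSilent) {
    this.log = log;
    this.isSilent = isSilent;
  }

  public void printInfo(String msg) {
    if (!isSilent) {
      System.err.println(msg); // echo to the console unless silent mode is on
    }
    log.info(msg);             // always record in the session log
  }

  public static void main(String[] args) {
    ConsoleHelperSketch console =
        new ConsoleHelperSketch(LoggerFactory.getLogger("SessionState"), false);
    console.printInfo("session initialized");
  }
}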
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java
index 5440dc3..9b66024 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.stats;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
@@ -33,7 +33,7 @@ import org.apache.hadoop.mapred.RunningJob;
 
 public class CounterStatsAggregator implements StatsAggregator {
 
-  private static final Log LOG = LogFactory.getLog(CounterStatsAggregator.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(CounterStatsAggregator.class.getName());
 
   private Counters counters;
   private JobClient jc;

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorSpark.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorSpark.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorSpark.java
index 303b75c..7ac01a7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorSpark.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorSpark.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.stats;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.spark.SparkTask;
@@ -27,7 +27,7 @@ import org.apache.hive.spark.counter.SparkCounters;
 public class CounterStatsAggregatorSpark
   implements StatsAggregator {
 
-  private static final Log LOG = LogFactory.getLog(CounterStatsAggregatorSpark.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CounterStatsAggregatorSpark.class);
 
   private SparkCounters sparkCounters;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorTez.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorTez.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorTez.java
index 9a7ad96..bb51fea 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorTez.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregatorTez.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.stats;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.tez.TezTask;
 import org.apache.tez.common.counters.TezCounters;
@@ -32,7 +32,7 @@ import org.apache.tez.common.counters.TezCounters;
  */
 public class CounterStatsAggregatorTez implements StatsAggregator {
 
-  private static final Log LOG = LogFactory.getLog(CounterStatsAggregatorTez.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(CounterStatsAggregatorTez.class.getName());
 
   private TezCounters counters;
   private final CounterStatsAggregator mrAggregator;

