hadoop-common-commits mailing list archives

From: aajis...@apache.org
Subject: [1/6] hadoop git commit: HADOOP-14539. Move commons logging APIs over to slf4j in hadoop-common.
Date: Mon, 24 Jul 2017 02:15:30 GMT
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 c12bf9a12 -> 7e583a382
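
The hunks below all apply the same mechanical change: the commons-logging Log/LogFactory pair is swapped for the slf4j Logger/LoggerFactory pair. As a rough sketch of the pattern (MyService is a made-up class name, not part of this commit), the migration looks like this, with slf4j's parameterized messages available as a bonus:

    // Before (commons logging):
    //   import org.apache.commons.logging.Log;
    //   import org.apache.commons.logging.LogFactory;
    //   private static final Log LOG = LogFactory.getLog(MyService.class);

    // After (slf4j):
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class MyService {
      // slf4j is only a facade; the actual backend is chosen at deployment time.
      private static final Logger LOG = LoggerFactory.getLogger(MyService.class);

      public void start(int port) {
        // The "{}" placeholder is rendered only if INFO is enabled,
        // so no string concatenation happens when the level is off.
        LOG.info("Starting service on port {}", port);
      }
    }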


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
index bff5eec..5b32798 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
@@ -23,9 +23,9 @@ import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Arrays;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.XDR;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This is a file handle used by the NFS clients.
@@ -33,7 +33,7 @@ import org.apache.hadoop.oncrpc.XDR;
  * on subsequent operations to reference the file.
  */
 public class FileHandle {
-  private static final Log LOG = LogFactory.getLog(FileHandle.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FileHandle.class);
   private static final String HEXES = "0123456789abcdef";
   private static final int HANDLE_LEN = 32;
   private byte[] handle; // Opaque handle

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java
index 80faca5..00e6d9f 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java
@@ -17,13 +17,13 @@
  */
 package org.apache.hadoop.nfs.nfs3;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.oncrpc.RpcProgram;
 import org.apache.hadoop.oncrpc.SimpleTcpServer;
 import org.apache.hadoop.portmap.PortmapMapping;
 import org.apache.hadoop.util.ShutdownHookManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.hadoop.util.ExitUtil.terminate;
 
@@ -32,7 +32,7 @@ import static org.apache.hadoop.util.ExitUtil.terminate;
  * Only TCP server is supported and UDP is not supported.
  */
 public abstract class Nfs3Base {
-  public static final Log LOG = LogFactory.getLog(Nfs3Base.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Nfs3Base.class);
   private final RpcProgram rpcProgram;
   private int nfsBoundPort; // Will set after server starts
 
@@ -54,7 +54,7 @@ public abstract class Nfs3Base {
       try {
         rpcProgram.register(PortmapMapping.TRANSPORT_TCP, nfsBoundPort);
       } catch (Throwable e) {
-        LOG.fatal("Failed to register the NFSv3 service.", e);
+        LOG.error("Failed to register the NFSv3 service.", e);
         terminate(1, e);
       }
     }
@@ -67,7 +67,7 @@ public abstract class Nfs3Base {
     try {
       tcpServer.run();
     } catch (Throwable e) {
-      LOG.fatal("Failed to start the TCP server.", e);
+      LOG.error("Failed to start the TCP server.", e);
       if (tcpServer.getBoundPort() > 0) {
         rpcProgram.unregister(PortmapMapping.TRANSPORT_TCP,
             tcpServer.getBoundPort());
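
A note on the fatal() -> error() changes in this hunk (and the similar one in Portmap further down): slf4j defines no FATAL level, so the highest available severity is ERROR. If the extra severity ever needs to be preserved, slf4j's Marker API is one option; the sketch below is an illustration only and not something this commit adds (the FATAL marker name is made up here):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.Marker;
    import org.slf4j.MarkerFactory;

    public class FatalMappingSketch {
      private static final Logger LOG =
          LoggerFactory.getLogger(FatalMappingSketch.class);
      // A marker that a logging backend can be configured to treat as fatal.
      private static final Marker FATAL = MarkerFactory.getMarker("FATAL");

      static void failHard(Throwable e) {
        // Plain mapping used by this commit:
        LOG.error("Failed to register the NFSv3 service.", e);
        // Optional alternative that tags the event for the backend:
        LOG.error(FATAL, "Failed to register the NFSv3 service.", e);
      }
    }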

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java
index 7ba37c9..c8528ba 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java
@@ -19,18 +19,19 @@ package org.apache.hadoop.oncrpc;
 
 import java.util.Arrays;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply.AcceptState;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.channel.ChannelHandlerContext;
 import org.jboss.netty.channel.MessageEvent;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A simple client that registers an RPC program with portmap.
  */
 public class RegistrationClient extends SimpleTcpClient {
-  public static final Log LOG = LogFactory.getLog(RegistrationClient.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(RegistrationClient.class);
 
   public RegistrationClient(String host, int port, XDR request) {
     super(host, port, request);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java
index aa4b948..0ae3c23 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java
@@ -17,17 +17,17 @@
  */
 package org.apache.hadoop.oncrpc;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.security.Credentials;
 import org.apache.hadoop.oncrpc.security.Verifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Represents an RPC message of type RPC call as defined in RFC 1831
  */
 public class RpcCall extends RpcMessage {
   public static final int RPC_VERSION = 2;
-  private static final Log LOG = LogFactory.getLog(RpcCall.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RpcCall.class);
   
   public static RpcCall read(XDR xdr) {
     return new RpcCall(xdr.readInt(), RpcMessage.Type.fromValue(xdr.readInt()),
@@ -60,7 +60,7 @@ public class RpcCall extends RpcMessage {
     this.credentials = credential;
     this.verifier = verifier;
     if (LOG.isTraceEnabled()) {
-      LOG.trace(this);
+      LOG.trace(this.toString());
     }
     validate();
   }
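
The trace(this) -> trace(this.toString()) change above is needed because the commons-logging trace() accepts any Object, while the slf4j overloads take a String message (optionally with format arguments). A small sketch of the two equivalent idioms (the class and method names here are illustrative only):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class TraceSketch {
      private static final Logger LOG = LoggerFactory.getLogger(TraceSketch.class);

      void logState(Object state) {
        // Idiom used in the diff: keep the guard, call toString() explicitly.
        if (LOG.isTraceEnabled()) {
          LOG.trace(state.toString());
        }
        // Equivalent parameterized form: toString() runs only if TRACE is
        // enabled, so the isTraceEnabled() guard becomes optional.
        LOG.trace("{}", state);
      }
    }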

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java
index c541cd6..5c059aa 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java
@@ -23,8 +23,6 @@ import java.net.InetSocketAddress;
 import java.net.SocketAddress;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply.AcceptState;
 import org.apache.hadoop.oncrpc.security.Verifier;
 import org.apache.hadoop.oncrpc.security.VerifierNone;
@@ -35,13 +33,15 @@ import org.jboss.netty.buffer.ChannelBuffers;
 import org.jboss.netty.channel.ChannelHandlerContext;
 import org.jboss.netty.channel.MessageEvent;
 import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Class for writing RPC server programs based on RFC 1050. Extend this class
  * and implement {@link #handleInternal} to handle the requests received.
  */
 public abstract class RpcProgram extends SimpleChannelUpstreamHandler {
-  static final Log LOG = LogFactory.getLog(RpcProgram.class);
+  static final Logger LOG = LoggerFactory.getLogger(RpcProgram.class);
   public static final int RPCB_PORT = 111;
   private final String program;
   private final String host;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java
index cbc9943..cebebd2 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.oncrpc;
 
 import java.nio.ByteBuffer;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.buffer.ChannelBuffers;
 import org.jboss.netty.channel.Channel;
@@ -29,6 +27,8 @@ import org.jboss.netty.channel.Channels;
 import org.jboss.netty.channel.MessageEvent;
 import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
 import org.jboss.netty.handler.codec.frame.FrameDecoder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public final class RpcUtil {
   /**
@@ -63,7 +63,8 @@ public final class RpcUtil {
    * each RPC client.
    */
   static class RpcFrameDecoder extends FrameDecoder {
-    public static final Log LOG = LogFactory.getLog(RpcFrameDecoder.class);
+    public static final Logger LOG =
+        LoggerFactory.getLogger(RpcFrameDecoder.class);
     private ChannelBuffer currentFrame;
 
     @Override
@@ -107,8 +108,8 @@ public final class RpcUtil {
    * request into a RpcInfo instance.
    */
   static final class RpcMessageParserStage extends SimpleChannelUpstreamHandler {
-    private static final Log LOG = LogFactory
-        .getLog(RpcMessageParserStage.class);
+    private static final Logger LOG = LoggerFactory
+        .getLogger(RpcMessageParserStage.class);
 
     @Override
     public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java
index b72153a..23b6682 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java
@@ -17,20 +17,21 @@
  */
 package org.apache.hadoop.oncrpc;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.channel.ChannelHandlerContext;
 import org.jboss.netty.channel.ChannelStateEvent;
 import org.jboss.netty.channel.ExceptionEvent;
 import org.jboss.netty.channel.MessageEvent;
 import org.jboss.netty.channel.SimpleChannelHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A simple TCP based RPC client handler used by {@link SimpleTcpServer}.
  */
 public class SimpleTcpClientHandler extends SimpleChannelHandler {
-  public static final Log LOG = LogFactory.getLog(SimpleTcpClient.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SimpleTcpClient.class);
   protected final XDR request;
 
   public SimpleTcpClientHandler(XDR request) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java
index bd48b15..177fa3d 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.oncrpc;
 import java.net.InetSocketAddress;
 import java.util.concurrent.Executors;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.jboss.netty.bootstrap.ServerBootstrap;
 import org.jboss.netty.channel.Channel;
 import org.jboss.netty.channel.ChannelFactory;
@@ -30,12 +28,15 @@ import org.jboss.netty.channel.ChannelPipelineFactory;
 import org.jboss.netty.channel.Channels;
 import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
 import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Simple TCP server implemented using netty.
  */
 public class SimpleTcpServer {
-  public static final Log LOG = LogFactory.getLog(SimpleTcpServer.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SimpleTcpServer.class);
   protected final int port;
   protected int boundPort = -1; // Will be set after server starts
   protected final SimpleChannelUpstreamHandler rpcProgram;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java
index d691aba..e65003c 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java
@@ -20,20 +20,21 @@ package org.apache.hadoop.oncrpc;
 import java.net.InetSocketAddress;
 import java.util.concurrent.Executors;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.jboss.netty.bootstrap.ConnectionlessBootstrap;
 import org.jboss.netty.channel.Channel;
 import org.jboss.netty.channel.Channels;
 import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
 import org.jboss.netty.channel.socket.DatagramChannelFactory;
 import org.jboss.netty.channel.socket.nio.NioDatagramChannelFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Simple UDP server implemented based on netty.
  */
 public class SimpleUdpServer {
-  public static final Log LOG = LogFactory.getLog(SimpleUdpServer.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SimpleUdpServer.class);
   private final int SEND_BUFFER_SIZE = 65536;
   private final int RECEIVE_BUFFER_SIZE = 65536;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java
index fe4350b..64edf48 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java
@@ -18,16 +18,16 @@
 package org.apache.hadoop.oncrpc.security;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.XDR;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Base class for all credentials. Currently we only support 3 different types
  * of auth flavors: AUTH_NONE, AUTH_SYS, and RPCSEC_GSS.
  */
 public abstract class Credentials extends RpcAuthInfo {
-  public static final Log LOG = LogFactory.getLog(Credentials.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Credentials.class);
 
   public static Credentials readFlavorAndCredentials(XDR xdr) {
     AuthFlavor flavor = AuthFlavor.fromValue(xdr.readInt());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java
index 93efba8..4a674e8 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java
@@ -19,13 +19,14 @@ package org.apache.hadoop.oncrpc.security;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcCall;
 import org.apache.hadoop.oncrpc.XDR;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public abstract class SecurityHandler {
-  public static final Log LOG = LogFactory.getLog(SecurityHandler.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(SecurityHandler.class);
   
   public abstract String getUser();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java
index 7586fda..123999d 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java
@@ -22,8 +22,6 @@ import java.net.SocketAddress;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcProgram;
 import org.apache.hadoop.oncrpc.RpcUtil;
 import org.apache.hadoop.util.StringUtils;
@@ -41,12 +39,14 @@ import org.jboss.netty.handler.timeout.IdleStateHandler;
 import org.jboss.netty.util.HashedWheelTimer;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Portmap service for binding RPC protocols. See RFC 1833 for details.
  */
 final class Portmap {
-  private static final Log LOG = LogFactory.getLog(Portmap.class);
+  private static final Logger LOG = LoggerFactory.getLogger(Portmap.class);
   private static final int DEFAULT_IDLE_TIME_MILLISECONDS = 5000;
 
   private ConnectionlessBootstrap udpServer;
@@ -65,7 +65,7 @@ final class Portmap {
       pm.start(DEFAULT_IDLE_TIME_MILLISECONDS,
           new InetSocketAddress(port), new InetSocketAddress(port));
     } catch (Throwable e) {
-      LOG.fatal("Failed to start the server. Cause:", e);
+      LOG.error("Failed to start the server. Cause:", e);
       pm.shutdown();
       System.exit(-1);
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e583a38/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java
index 67175d0..0bc380f 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.portmap;
 
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.RpcCall;
 import org.apache.hadoop.oncrpc.RpcInfo;
@@ -39,6 +37,8 @@ import org.jboss.netty.channel.group.ChannelGroup;
 import org.jboss.netty.handler.timeout.IdleState;
 import org.jboss.netty.handler.timeout.IdleStateAwareChannelUpstreamHandler;
 import org.jboss.netty.handler.timeout.IdleStateEvent;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 final class RpcProgramPortmap extends IdleStateAwareChannelUpstreamHandler {
   static final int PROGRAM = 100000;
@@ -51,7 +51,8 @@ final class RpcProgramPortmap extends IdleStateAwareChannelUpstreamHandler {
   static final int PMAPPROC_DUMP = 4;
   static final int PMAPPROC_GETVERSADDR = 9;
 
-  private static final Log LOG = LogFactory.getLog(RpcProgramPortmap.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(RpcProgramPortmap.class);
 
   private final ConcurrentHashMap<String, PortmapMapping> map = new ConcurrentHashMap<String, PortmapMapping>();
 

