hadoop-common-commits mailing list archives

From szets...@apache.org
Subject svn commit: r990460 - in /hadoop/common/trunk: ./ src/java/org/apache/hadoop/fs/s3native/ src/java/org/apache/hadoop/io/ src/java/org/apache/hadoop/io/compress/ src/java/org/apache/hadoop/io/compress/zlib/ src/java/org/apache/hadoop/io/file/tfile/ src/...
Date Sat, 28 Aug 2010 22:44:11 GMT
Author: szetszwo
Date: Sat Aug 28 22:44:10 2010
New Revision: 990460

URL: http://svn.apache.org/viewvc?rev=990460&view=rev
Log:
HADOOP-6884. Add LOG.isDebugEnabled() guard for each LOG.debug(..).  Contributed by Erik Steffl
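
For readers skimming the diff below: with commons-logging, the argument to
LOG.debug(..) is evaluated before the call, so an unguarded call pays for the
string concatenation even when debug logging is disabled. Wrapping the call in
LOG.isDebugEnabled() makes the call site essentially free when debug is off.
A minimal sketch of the pattern this patch applies (the class name and message
are illustrative, not taken from the patch):

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class GuardedDebugExample {
      private static final Log LOG =
          LogFactory.getLog(GuardedDebugExample.class);

      void process(String key) {
        // Before: the message string is built on every call, even when
        // debug logging is disabled.
        //   LOG.debug("retrieving metadata for key '" + key + "'");

        // After: the concatenation happens only when debug is enabled.
        if (LOG.isDebugEnabled()) {
          LOG.debug("retrieving metadata for key '" + key + "'");
        }
      }
    }

Where a debug call already sits behind a cheap condition, the patch folds the
guard into that condition instead; see the SequenceFile.java hunks, which
change "if (valLength < 0)" to "if ((valLength < 0) && LOG.isDebugEnabled())".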

Modified:
    hadoop/common/trunk/CHANGES.txt
    hadoop/common/trunk/src/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
    hadoop/common/trunk/src/java/org/apache/hadoop/io/SequenceFile.java
    hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CodecPool.java
    hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
    hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
    hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/Compression.java
    hadoop/common/trunk/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java
    hadoop/common/trunk/src/java/org/apache/hadoop/net/NetworkTopology.java
    hadoop/common/trunk/src/java/org/apache/hadoop/security/Groups.java
    hadoop/common/trunk/src/java/org/apache/hadoop/security/SaslRpcClient.java
    hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
    hadoop/common/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java
    hadoop/common/trunk/src/java/org/apache/hadoop/util/NativeCodeLoader.java
    hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java
    hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
    hadoop/common/trunk/src/test/core/org/apache/hadoop/io/TestArrayFile.java
    hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java
    hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java
    hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/AbstractDaemonCluster.java

Modified: hadoop/common/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Sat Aug 28 22:44:10 2010
@@ -127,6 +127,9 @@ Trunk (unreleased changes)
 
   OPTIMIZATIONS
 
+    HADOOP-6884. Add LOG.isDebugEnabled() guard for each LOG.debug(..).
+    (Erik Steffl via szetszwo)
+
   BUG FIXES
 
     HADOOP-6638. try to relogin in a case of failed RPC connection (expired tgt) 

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java Sat Aug 28 22:44:10 2010
@@ -331,7 +331,9 @@ public class NativeS3FileSystem extends 
       throw new IOException("File already exists:"+f);
     }
     
-    LOG.debug("Creating new file '" + f + "' in S3");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Creating new file '" + f + "' in S3");
+    }
     Path absolutePath = makeAbsolute(f);
     String key = pathToKey(absolutePath);
     return new FSDataOutputStream(new NativeS3FsOutputStream(getConf(), store,
@@ -344,7 +346,10 @@ public class NativeS3FileSystem extends 
     try {
       status = getFileStatus(f);
     } catch (FileNotFoundException e) {
-      LOG.debug("Delete called for '" + f + "' but file does not exist, so returning false");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Delete called for '" + f +
+            "' but file does not exist, so returning false");
+      }
       return false;
     }
     Path absolutePath = makeAbsolute(f);
@@ -356,7 +361,9 @@ public class NativeS3FileSystem extends 
 
       createParent(f);
 
-      LOG.debug("Deleting directory '" + f  + "'");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Deleting directory '" + f  + "'");
+      }
       String priorLastKey = null;
       do {
         PartialListing listing = store.list(key, S3_MAX_LISTING_LENGTH, priorLastKey, true);
@@ -372,7 +379,9 @@ public class NativeS3FileSystem extends 
         //this is fine, we don't require a marker
       }
     } else {
-      LOG.debug("Deleting file '" + f + "'");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Deleting file '" + f + "'");
+      }
       createParent(f);
       store.delete(key);
     }
@@ -388,27 +397,40 @@ public class NativeS3FileSystem extends 
       return newDirectory(absolutePath);
     }
     
-    LOG.debug("getFileStatus retrieving metadata for key '" + key + "'");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("getFileStatus retrieving metadata for key '" + key + "'");
+    }
     FileMetadata meta = store.retrieveMetadata(key);
     if (meta != null) {
-      LOG.debug("getFileStatus returning 'file' for key '" + key + "'");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("getFileStatus returning 'file' for key '" + key + "'");
+      }
       return newFile(meta, absolutePath);
     }
     if (store.retrieveMetadata(key + FOLDER_SUFFIX) != null) {
-      LOG.debug("getFileStatus returning 'directory' for key '" + key + "' as '"
-          + key + FOLDER_SUFFIX + "' exists");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("getFileStatus returning 'directory' for key '" + key +
+            "' as '" + key + FOLDER_SUFFIX + "' exists");
+      }
       return newDirectory(absolutePath);
     }
     
-    LOG.debug("getFileStatus listing key '" + key + "'");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("getFileStatus listing key '" + key + "'");
+    }
     PartialListing listing = store.list(key, 1);
     if (listing.getFiles().length > 0 ||
         listing.getCommonPrefixes().length > 0) {
-      LOG.debug("getFileStatus returning 'directory' for key '" + key + "' as it has contents");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("getFileStatus returning 'directory' for key '" + key +
+            "' as it has contents");
+      }
       return newDirectory(absolutePath);
     }
     
-    LOG.debug("getFileStatus could not find key '" + key + "'");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("getFileStatus could not find key '" + key + "'");
+    }
     throw new FileNotFoundException("No such file or directory '" + absolutePath + "'");
   }
 
@@ -510,7 +532,9 @@ public class NativeS3FileSystem extends 
 
       }
     } catch (FileNotFoundException e) {
-      LOG.debug("Making dir '" + f + "' in S3");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Making dir '" + f + "' in S3");
+      }
       String key = pathToKey(f) + FOLDER_SUFFIX;
       store.storeEmptyFile(key);    
     }
@@ -560,22 +584,35 @@ public class NativeS3FileSystem extends 
     try {
       boolean dstIsFile = getFileStatus(dst).isFile();
       if (dstIsFile) {
-        LOG.debug(debugPreamble + "returning false as dst is an already existing file");
+        if(LOG.isDebugEnabled()) {
+          LOG.debug(debugPreamble +
+              "returning false as dst is an already existing file");
+        }
         return false;
       } else {
-        LOG.debug(debugPreamble + "using dst as output directory");
+        if(LOG.isDebugEnabled()) {
+          LOG.debug(debugPreamble + "using dst as output directory");
+        }
         dstKey = pathToKey(makeAbsolute(new Path(dst, src.getName())));
       }
     } catch (FileNotFoundException e) {
-      LOG.debug(debugPreamble + "using dst as output destination");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble + "using dst as output destination");
+      }
       dstKey = pathToKey(makeAbsolute(dst));
       try {
         if (getFileStatus(dst.getParent()).isFile()) {
-          LOG.debug(debugPreamble + "returning false as dst parent exists and is a file");
+          if(LOG.isDebugEnabled()) {
+            LOG.debug(debugPreamble +
+                "returning false as dst parent exists and is a file");
+          }
           return false;
         }
       } catch (FileNotFoundException ex) {
-        LOG.debug(debugPreamble + "returning false as dst parent does not exist");
+        if(LOG.isDebugEnabled()) {
+          LOG.debug(debugPreamble +
+              "returning false as dst parent does not exist");
+        }
         return false;
       }
     }
@@ -584,15 +621,22 @@ public class NativeS3FileSystem extends 
     try {
       srcIsFile = getFileStatus(src).isFile();
     } catch (FileNotFoundException e) {
-      LOG.debug(debugPreamble + "returning false as src does not exist");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble + "returning false as src does not exist");
+      }
       return false;
     }
     if (srcIsFile) {
-      LOG.debug(debugPreamble + "src is file, so doing copy then delete in S3");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble +
+            "src is file, so doing copy then delete in S3");
+      }
       store.copy(srcKey, dstKey);
       store.delete(srcKey);
     } else {
-      LOG.debug(debugPreamble + "src is directory, so copying contents");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble + "src is directory, so copying contents");
+      }
       store.storeEmptyFile(dstKey + FOLDER_SUFFIX);
 
       List<String> keysToDelete = new ArrayList<String>();
@@ -606,7 +650,10 @@ public class NativeS3FileSystem extends 
         priorLastKey = listing.getPriorLastKey();
       } while (priorLastKey != null);
 
-      LOG.debug(debugPreamble + "all files in src copied, now removing src files");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble +
+            "all files in src copied, now removing src files");
+      }
       for (String key: keysToDelete) {
         store.delete(key);
       }
@@ -616,7 +663,9 @@ public class NativeS3FileSystem extends 
       } catch (FileNotFoundException e) {
         //this is fine, we don't require a marker
       }
-      LOG.debug(debugPreamble + "done");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(debugPreamble + "done");
+      }
     }
 
     return true;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/SequenceFile.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/SequenceFile.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/SequenceFile.java Sat Aug 28 22:44:10 2010
@@ -1834,7 +1834,7 @@ public class SequenceFile {
         --noBufferedValues;
         
         // Sanity check
-        if (valLength < 0) {
+        if ((valLength < 0) && LOG.isDebugEnabled()) {
           LOG.debug(val + " is a zero-length value");
         }
       }
@@ -1873,7 +1873,7 @@ public class SequenceFile {
         --noBufferedValues;
         
         // Sanity check
-        if (valLength < 0) {
+        if ((valLength < 0) && LOG.isDebugEnabled()) {
           LOG.debug(val + " is a zero-length value");
         }
       }
@@ -2415,7 +2415,9 @@ public class SequenceFile {
     }
     
     private int sortPass(boolean deleteInput) throws IOException {
-      LOG.debug("running sort pass");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("running sort pass");
+      }
       SortPass sortPass = new SortPass();         // make the SortPass
       sortPass.setProgressable(progressable);
       mergeSort = new MergeSort(sortPass.new SeqFileComparator());
@@ -2515,7 +2517,9 @@ public class SequenceFile {
           }
 
           // buffer is full -- sort & flush it
-          LOG.debug("flushing segment " + segments);
+          if(LOG.isDebugEnabled()) {
+            LOG.debug("flushing segment " + segments);
+          }
           rawBuffer = rawKeys.getData();
           sort(count);
           // indicate we're making progress
@@ -2798,7 +2802,9 @@ public class SequenceFile {
 
     /** sort calls this to generate the final merged output */
     private int mergePass(Path tmpDir) throws IOException {
-      LOG.debug("running merge pass");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("running merge pass");
+      }
       Writer writer = cloneFileAttributes(
                                           outFile.suffix(".0"), outFile, null);
       RawKeyValueIterator r = merge(outFile.suffix(".0"), 
@@ -3028,7 +3034,9 @@ public class SequenceFile {
             Path outputFile =  lDirAlloc.getLocalPathForWrite(
                                                 tmpFilename.toString(),
                                                 approxOutputSize, conf);
-            LOG.debug("writing intermediate results to " + outputFile);
+            if(LOG.isDebugEnabled()) { 
+              LOG.debug("writing intermediate results to " + outputFile);
+            }
             Writer writer = cloneFileAttributes(
                                                fs.makeQualified(segmentsToMerge.get(0).segmentPathName),
                                                fs.makeQualified(outputFile), null);

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CodecPool.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CodecPool.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CodecPool.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/CodecPool.java Sat Aug 28 22:44:10 2010
@@ -107,7 +107,9 @@ public class CodecPool {
       LOG.info("Got brand-new compressor");
     } else {
       compressor.reinit(conf);
-      LOG.debug("Got recycled compressor");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Got recycled compressor");
+      }
     }
     return compressor;
   }
@@ -131,7 +133,9 @@ public class CodecPool {
       decompressor = codec.createDecompressor();
       LOG.info("Got brand-new decompressor");
     } else {
-      LOG.debug("Got recycled decompressor");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Got recycled decompressor");
+      }
     }
     return decompressor;
   }

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java Sat Aug 28 22:44:10 2010
@@ -76,6 +76,8 @@ public class BuiltInZlibDeflater extends
       LOG.warn(strategy + " not supported by BuiltInZlibDeflater.");
       setStrategy(DEFAULT_STRATEGY);
     }
-    LOG.debug("Reinit compressor with new compression configuration");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Reinit compressor with new compression configuration");
+    }
   }
 }

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java Sat Aug 28 22:44:10 2010
@@ -253,7 +253,9 @@ public class ZlibCompressor implements C
     stream = init(level.compressionLevel(), 
                   strategy.compressionStrategy(), 
                   windowBits.windowBits());
-    LOG.debug("Reinit compressor with new compression configuration");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Reinit compressor with new compression configuration");
+    }
   }
 
   public synchronized void setInput(byte[] b, int off, int len) {

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/Compression.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/Compression.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/Compression.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/file/tfile/Compression.java Sat Aug 28 22:44:10 2010
@@ -278,7 +278,9 @@ final class Compression {
             // it.
             LOG.warn("Compressor obtained from CodecPool already finished()");
           } else {
-            LOG.debug("Got a compressor: " + compressor.hashCode());
+            if(LOG.isDebugEnabled()) {
+              LOG.debug("Got a compressor: " + compressor.hashCode());
+            }
           }
           /**
            * Following statement is necessary to get around bugs in 0.18 where a
@@ -293,7 +295,9 @@ final class Compression {
 
     public void returnCompressor(Compressor compressor) {
       if (compressor != null) {
-        LOG.debug("Return a compressor: " + compressor.hashCode());
+        if(LOG.isDebugEnabled()) {
+          LOG.debug("Return a compressor: " + compressor.hashCode());
+        }
         CodecPool.returnCompressor(compressor);
       }
     }
@@ -308,7 +312,9 @@ final class Compression {
             // it.
             LOG.warn("Deompressor obtained from CodecPool already finished()");
           } else {
-            LOG.debug("Got a decompressor: " + decompressor.hashCode());
+            if(LOG.isDebugEnabled()) {
+              LOG.debug("Got a decompressor: " + decompressor.hashCode());
+            }
           }
           /**
            * Following statement is necessary to get around bugs in 0.18 where a
@@ -324,7 +330,9 @@ final class Compression {
 
     public void returnDecompressor(Decompressor decompressor) {
       if (decompressor != null) {
-        LOG.debug("Returned a decompressor: " + decompressor.hashCode());
+        if(LOG.isDebugEnabled()) {
+          LOG.debug("Returned a decompressor: " + decompressor.hashCode());
+        }
         CodecPool.returnDecompressor(decompressor);
       }
     }

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java Sat Aug 28 22:44:10 2010
@@ -67,9 +67,11 @@ class RetryInvocationHandler implements 
           }
           return null;
         }
-        LOG.debug("Exception while invoking " + method.getName()
-                 + " of " + implementation.getClass() + ". Retrying."
-                 + StringUtils.stringifyException(e));
+        if(LOG.isDebugEnabled()) {
+          LOG.debug("Exception while invoking " + method.getName()
+              + " of " + implementation.getClass() + ". Retrying."
+              + StringUtils.stringifyException(e));
+        }
       }
     }
   }

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Client.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Client.java Sat Aug 28 22:44:10 2010
@@ -444,8 +444,10 @@ public class Client {
           disposeSasl();
           if (shouldAuthenticateOverKrb()) {
             if (currRetries < maxRetries) {
-              LOG.debug("Exception encountered while connecting to "
-                  + "the server : " + ex);
+              if(LOG.isDebugEnabled()) {
+                LOG.debug("Exception encountered while connecting to "
+                    + "the server : " + ex);
+              }
               // try re-login
               if (UserGroupInformation.isLoginKeytabBased()) {
                 UserGroupInformation.getLoginUser().reloginFromKeytab();

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java Sat Aug 28 22:44:10 2010
@@ -615,7 +615,9 @@ public abstract class Server {
           // If there were some calls that have not been sent out for a
           // long time, discard them.
           //
-          LOG.debug("Checking for old call responses.");
+          if(LOG.isDebugEnabled()) {
+            LOG.debug("Checking for old call responses.");
+          }
           ArrayList<Call> calls;
           
           // get the list of channels from list of keys.

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java Sat Aug 28 22:44:10 2010
@@ -192,16 +192,15 @@ class WritableRpcEngine implements RpcEn
 
     public Object invoke(Object proxy, Method method, Object[] args)
       throws Throwable {
-      final boolean logDebug = LOG.isDebugEnabled();
       long startTime = 0;
-      if (logDebug) {
+      if (LOG.isDebugEnabled()) {
         startTime = System.currentTimeMillis();
       }
 
       ObjectWritable value = (ObjectWritable)
         client.call(new Invocation(method, args), address, 
                     protocol, ticket, rpcTimeout);
-      if (logDebug) {
+      if (LOG.isDebugEnabled()) {
         long callTime = System.currentTimeMillis() - startTime;
         LOG.debug("Call: " + method.getName() + " " + callTime);
       }

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/net/NetworkTopology.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/net/NetworkTopology.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/net/NetworkTopology.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/net/NetworkTopology.java Sat Aug 28 22:44:10 2010
@@ -333,7 +333,9 @@ public class NetworkTopology {
           numOfRacks++;
         }
       }
-      LOG.debug("NetworkTopology became:\n" + this.toString());
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("NetworkTopology became:\n" + this.toString());
+      }
     } finally {
       netlock.writeLock().unlock();
     }
@@ -359,7 +361,9 @@ public class NetworkTopology {
           numOfRacks--;
         }
       }
-      LOG.debug("NetworkTopology became:\n" + this.toString());
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("NetworkTopology became:\n" + this.toString());
+      }
     } finally {
       netlock.writeLock().unlock();
     }

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/Groups.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/Groups.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/security/Groups.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/security/Groups.java Sat Aug 28 22:44:10 2010
@@ -78,14 +78,18 @@ public class Groups {
     long now = System.currentTimeMillis();
     // if cache has a value and it hasn't expired
     if (groups != null && (groups.getTimestamp() + cacheTimeout > now)) {
-      LOG.debug("Returning cached groups for '" + user + "'");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Returning cached groups for '" + user + "'");
+      }
       return groups.getGroups();
     }
     
     // Create and cache user's groups
     groups = new CachedGroups(impl.getGroups(user));
     userToGroupsMap.put(user, groups);
-    LOG.debug("Returning fetched groups for '" + user + "'");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Returning fetched groups for '" + user + "'");
+    }
     return groups.getGroups();
   }
   
@@ -132,7 +136,9 @@ public class Groups {
    */
   public static Groups getUserToGroupsMappingService(Configuration conf) {
     if(GROUPS == null) {
-      LOG.debug(" Creating new Groups object");
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(" Creating new Groups object");
+      }
       GROUPS = new Groups(conf);
     }
     return GROUPS;

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/SaslRpcClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/SaslRpcClient.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/security/SaslRpcClient.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/security/SaslRpcClient.java Sat Aug 28 22:44:10 2010
@@ -80,10 +80,9 @@ public class SaslRpcClient {
       break;
     case KERBEROS:
       if (LOG.isDebugEnabled()) {
-        LOG
-            .debug("Creating SASL " + AuthMethod.KERBEROS.getMechanismName()
-                + " client. Server's Kerberos principal name is "
-                + serverPrincipal);
+        LOG.debug("Creating SASL " + AuthMethod.KERBEROS.getMechanismName()
+            + " client. Server's Kerberos principal name is "
+            + serverPrincipal);
       }
       if (serverPrincipal == null || serverPrincipal.length() == 0) {
         throw new IOException(

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java Sat Aug 28 22:44:10 2010
@@ -534,13 +534,17 @@ public class UserGroupInformation {
             while (true) {
               try {
                 long now = System.currentTimeMillis();
-                LOG.debug("Current time is " + now);
-                LOG.debug("Next refresh is " + nextRefresh);
+                if(LOG.isDebugEnabled()) {
+                  LOG.debug("Current time is " + now);
+                  LOG.debug("Next refresh is " + nextRefresh);
+                }
                 if (now < nextRefresh) {
                   Thread.sleep(nextRefresh - now);
                 }
                 Shell.execCommand(cmd, "-R");
-                LOG.debug("renewed ticket");
+                if(LOG.isDebugEnabled()) {
+                  LOG.debug("renewed ticket");
+                }
                 reloginFromTicketCache();
                 tgt = getTGT();
                 if (tgt == null) {

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java Sat Aug 28 22:44:10 2010
@@ -115,8 +115,10 @@ public class ServiceAuthorizationManager
         // just keep going
       }
     }
-    LOG.debug("for protocol authorization compare (" + clientPrincipal + "): " 
-        + shortName + " with " + user.getShortUserName());
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("for protocol authorization compare (" + clientPrincipal +
+          "): " + shortName + " with " + user.getShortUserName());
+    }
     if((shortName != null &&  !shortName.equals(user.getShortUserName())) || 
         !acl.isUserAllowed(user)) {
       AUDITLOG.warn(AUTHZ_FAILED_FOR + user + " for protocol="+protocol);

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java Sat Aug 28 22:44:10 2010
@@ -316,7 +316,9 @@ public class GenericOptionsParser {
       if (!localFs.exists(p)) {
           throw new FileNotFoundException("File "+fileName+" does not exist.");
       }
-      LOG.debug("setting conf tokensFile: " + fileName);
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("setting conf tokensFile: " + fileName);
+      }
       conf.set("mapreduce.job.credentials.json", localFs.makeQualified(p)
           .toString());
 

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/util/NativeCodeLoader.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/util/NativeCodeLoader.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/util/NativeCodeLoader.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/util/NativeCodeLoader.java Sat Aug 28 22:44:10 2010
@@ -42,15 +42,20 @@ public class NativeCodeLoader {
   
   static {
     // Try to load native hadoop library and set fallback flag appropriately
-    LOG.debug("Trying to load the custom-built native-hadoop library...");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Trying to load the custom-built native-hadoop library...");
+    }
     try {
       System.loadLibrary("hadoop");
       LOG.info("Loaded the native-hadoop library");
       nativeCodeLoaded = true;
     } catch (Throwable t) {
       // Ignore failure to load
-      LOG.debug("Failed to load native-hadoop with error: " + t);
-      LOG.debug("java.library.path=" + System.getProperty("java.library.path"));
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Failed to load native-hadoop with error: " + t);
+        LOG.debug("java.library.path=" +
+            System.getProperty("java.library.path"));
+      }
     }
     
     if (!nativeCodeLoaded) {

Modified: hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java (original)
+++ hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java Sat Aug 28 22:44:10 2010
@@ -94,7 +94,9 @@ public class ProbabilityModel {
 
     float ret = conf.getFloat(newProbName,
         conf.getFloat(ALL_PROBABILITIES, DEFAULT_PROB));
-    LOG.debug("Request for " + newProbName + " returns=" + ret);
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Request for " + newProbName + " returns=" + ret);
+    }
     // Make sure that probability level is valid.
     if (ret < DEFAULT_PROB || ret > MAX_PROB) {
       LOG.info("Probability level is incorrect. Default value is set");

Modified: hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java (original)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java Sat Aug 28 22:44:10 2010
@@ -310,7 +310,9 @@ public class LoadGenerator extends Confi
       }
     } 
     
-    LOG.debug("Done with testing.  Waiting for threads to finish.");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("Done with testing.  Waiting for threads to finish.");
+    }
     for (DFSClientThread thread : threads) {
       thread.join();
       for (int i=0; i<TOTAL_OP_TYPES; i++) {

Modified: hadoop/common/trunk/src/test/core/org/apache/hadoop/io/TestArrayFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/io/TestArrayFile.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/io/TestArrayFile.java (original)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/io/TestArrayFile.java Sat Aug 28 22:44:10 2010
@@ -54,7 +54,9 @@ public class TestArrayFile extends TestC
   }
 
   private static RandomDatum[] generate(int count) {
-    LOG.debug("generating " + count + " records in debug");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("generating " + count + " records in debug");
+    }
     RandomDatum[] data = new RandomDatum[count];
     RandomDatum.Generator generator = new RandomDatum.Generator();
     for (int i = 0; i < count; i++) {
@@ -68,7 +70,9 @@ public class TestArrayFile extends TestC
     throws IOException {
     Configuration conf = new Configuration();
     MapFile.delete(fs, file);
-    LOG.debug("creating with " + data.length + " debug");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("creating with " + data.length + " debug");
+    }
     ArrayFile.Writer writer = new ArrayFile.Writer(conf, fs, file, RandomDatum.class);
     writer.setIndexInterval(100);
     for (int i = 0; i < data.length; i++)
@@ -79,7 +83,9 @@ public class TestArrayFile extends TestC
   private static void readTest(FileSystem fs, RandomDatum[] data, String file, Configuration conf)
     throws IOException {
     RandomDatum v = new RandomDatum();
-    LOG.debug("reading " + data.length + " debug");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("reading " + data.length + " debug");
+    }
     ArrayFile.Reader reader = new ArrayFile.Reader(fs, file, conf);
     for (int i = 0; i < data.length; i++) {       // try forwards
       reader.get(i, v);
@@ -94,7 +100,9 @@ public class TestArrayFile extends TestC
       }
     }
     reader.close();
-    LOG.debug("done reading " + data.length + " debug");
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("done reading " + data.length + " debug");
+    }
   }
 
 

Modified: hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java (original)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java Sat Aug 28 22:44:10 2010
@@ -83,7 +83,9 @@ public class TestAvroRpc extends TestCas
       try {
         proxy.error();
       } catch (AvroRemoteException e) {
-        LOG.debug("Caught " + e);
+        if(LOG.isDebugEnabled()) {
+          LOG.debug("Caught " + e);
+        }
         caught = true;
       }
       assertTrue(caught);

Modified: hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java (original)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java Sat Aug 28 22:44:10 2010
@@ -296,7 +296,9 @@ public class TestRPC extends TestCase {
     try {
       proxy.error();
     } catch (IOException e) {
-      LOG.debug("Caught " + e);
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Caught " + e);
+      }
       caught = true;
     }
     assertTrue(caught);

Modified: hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/AbstractDaemonCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/AbstractDaemonCluster.java?rev=990460&r1=990459&r2=990460&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/AbstractDaemonCluster.java (original)
+++ hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/AbstractDaemonCluster.java Sat Aug 28 22:44:10 2010
@@ -482,7 +482,9 @@ public abstract class AbstractDaemonClus
           LOG.info("Daemon is : " + daemon.getHostName() + " pinging...");
           break;
         } catch (Exception exp) {
-          LOG.debug(daemon.getHostName() + " is waiting to come up.");
+          if(LOG.isDebugEnabled()) {
+            LOG.debug(daemon.getHostName() + " is waiting to come up.");
+          }
           waitFor(60000);
         }
       }
@@ -502,7 +504,9 @@ public abstract class AbstractDaemonClus
       while (true) {
         try {
           daemon.ping();
-          LOG.debug(daemon.getHostName() +" is waiting state to stop.");
+          if(LOG.isDebugEnabled()) {
+            LOG.debug(daemon.getHostName() +" is waiting state to stop.");
+          }
           waitFor(60000);
         } catch (Exception exp) {
           LOG.info("Daemon is : " + daemon.getHostName() + " stopped...");


