hadoop-common-commits mailing list archives

From cmcc...@apache.org
Subject git commit: HDFS-4486. Add log category for long-running DFSClient notices. Contributed by Zhe Zhang.
Date Wed, 27 Aug 2014 20:48:19 GMT
Repository: hadoop
Updated Branches:
  refs/heads/trunk d805cc27a -> 225569ece


HDFS-4486. Add log category for long-running DFSClient notices. Contributed by Zhe Zhang.
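
For context: the PerformanceAdvisory class added below funnels these long-running
DFSClient notices through a single commons-logging category,
org.apache.hadoop.util.PerformanceAdvisory, so operators can enable them without
turning on debug output component by component. A minimal sketch of a call site and
the matching log4j setting, assuming the stock log4j configuration Hadoop ships with
(the demo class and message are hypothetical, not part of this patch):

    import org.apache.hadoop.util.PerformanceAdvisory;

    public class AdvisoryExample {  // hypothetical demo class
      public static void main(String[] args) {
        // Route a performance-relevant fallback notice to the shared
        // advisory category instead of a component-specific logger.
        PerformanceAdvisory.LOG.debug("falling back to a slower code path");
      }
    }

To surface the notices, raise the category to DEBUG in log4j.properties:

    log4j.logger.org.apache.hadoop.util.PerformanceAdvisory=DEBUG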


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/225569ec
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/225569ec
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/225569ec

Branch: refs/heads/trunk
Commit: 225569ece229cec32f852f831fd337a139c44b1e
Parents: d805cc2
Author: Colin Patrick Mccabe <cmccabe@cloudera.com>
Authored: Wed Aug 27 13:39:40 2014 -0700
Committer: Colin Patrick Mccabe <cmccabe@cloudera.com>
Committed: Wed Aug 27 13:39:40 2014 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/crypto/OpensslCipher.java |  2 ++
 .../crypto/random/OpensslSecureRandom.java      |  3 +++
 .../org/apache/hadoop/io/nativeio/NativeIO.java |  7 ++---
 .../JniBasedUnixGroupsMappingWithFallback.java  |  3 ++-
 .../apache/hadoop/util/PerformanceAdvisory.java | 24 +++++++++++++++++
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |  3 +++
 .../apache/hadoop/hdfs/BlockReaderFactory.java  | 27 +++++++++-----------
 .../hdfs/shortcircuit/DomainSocketFactory.java  |  4 ++-
 8 files changed, 53 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/225569ec/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
index 264652b..2eb16ee 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.NativeCodeLoader;
 
 import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.PerformanceAdvisory;
 
 /**
  * OpenSSL cipher using JNI.
@@ -82,6 +83,7 @@ public final class OpensslCipher {
     String loadingFailure = null;
     try {
       if (!NativeCodeLoader.buildSupportsOpenssl()) {
+        PerformanceAdvisory.LOG.debug("Build does not support openssl");
         loadingFailure = "build does not support openssl.";
       } else {
         initIDs();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/225569ec/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
index b1fa988..6c53a0a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.NativeCodeLoader;
 
 import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.PerformanceAdvisory;
 
 /**
  * OpenSSL secure random using JNI.
@@ -67,6 +68,8 @@ public class OpensslSecureRandom extends Random {
   
   public OpensslSecureRandom() {
     if (!nativeEnabled) {
+      PerformanceAdvisory.LOG.debug("Build does not support openssl, " +
+          "falling back to Java SecureRandom.");
       fallback = new java.security.SecureRandom();
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/225569ec/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
index fafa295..53d31d6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.fs.HardLink;
 import org.apache.hadoop.io.SecureIOUtils.AlreadyExistsException;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.PerformanceAdvisory;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -196,7 +197,7 @@ public class NativeIO {
           // This can happen if the user has an older version of libhadoop.so
           // installed - in this case we can continue without native IO
           // after warning
-          LOG.error("Unable to initialize NativeIO libraries", t);
+          PerformanceAdvisory.LOG.debug("Unable to initialize NativeIO libraries", t);
         }
       }
     }
@@ -574,7 +575,7 @@ public class NativeIO {
           // This can happen if the user has an older version of libhadoop.so
           // installed - in this case we can continue without native IO
           // after warning
-          LOG.error("Unable to initialize NativeIO libraries", t);
+          PerformanceAdvisory.LOG.debug("Unable to initialize NativeIO libraries", t);
         }
       }
     }
@@ -593,7 +594,7 @@ public class NativeIO {
         // This can happen if the user has an older version of libhadoop.so
         // installed - in this case we can continue without native IO
         // after warning
-        LOG.error("Unable to initialize NativeIO libraries", t);
+        PerformanceAdvisory.LOG.debug("Unable to initialize NativeIO libraries", t);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/225569ec/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java
index 908ca14..40333fc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java
@@ -24,6 +24,7 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.util.NativeCodeLoader;
+import org.apache.hadoop.util.PerformanceAdvisory;
 
 public class JniBasedUnixGroupsMappingWithFallback implements
     GroupMappingServiceProvider {
@@ -37,7 +38,7 @@ public class JniBasedUnixGroupsMappingWithFallback implements
     if (NativeCodeLoader.isNativeCodeLoaded()) {
       this.impl = new JniBasedUnixGroupsMapping();
     } else {
-      LOG.debug("Falling back to shell based");
+      PerformanceAdvisory.LOG.debug("Falling back to shell based");
       this.impl = new ShellBasedUnixGroupsMapping();
     }
     if (LOG.isDebugEnabled()){

http://git-wip-us.apache.org/repos/asf/hadoop/blob/225569ec/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PerformanceAdvisory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PerformanceAdvisory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PerformanceAdvisory.java
new file mode 100644
index 0000000..306d47c
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PerformanceAdvisory.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.util;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class PerformanceAdvisory {
+  public static final Log LOG = LogFactory.getLog(PerformanceAdvisory.class);
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/225569ec/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 7783243..f3ecf07 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -514,6 +514,9 @@ Release 2.6.0 - UNRELEASED
     HDFS-6899. Allow changing MiniDFSCluster volumes per DN and capacity
     per volume. (Arpit Agarwal)
 
+    HDFS-4486. Add log category for long-running DFSClient notices (Zhe Zhang
+    via Colin Patrick McCabe)
+
   OPTIMIZATIONS
 
     HDFS-6690. Deduplicate xattr names in memory. (wang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/225569ec/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderFactory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderFactory.java
index d27bd6e..3fb442b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderFactory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderFactory.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.PerformanceAdvisory;
 import org.apache.hadoop.util.Time;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -343,10 +344,9 @@ public class BlockReaderFactory implements ShortCircuitReplicaCreator {
       return null;
     }
     if (clientContext.getDisableLegacyBlockReaderLocal()) {
-      if (LOG.isTraceEnabled()) {
-        LOG.trace(this + ": can't construct BlockReaderLocalLegacy because " +
-            "disableLegacyBlockReaderLocal is set.");
-      }
+      PerformanceAdvisory.LOG.debug(this + ": can't construct " +
+          "BlockReaderLocalLegacy because " +
+          "disableLegacyBlockReaderLocal is set.");
       return null;
     }
     IOException ioe = null;
@@ -385,10 +385,8 @@ public class BlockReaderFactory implements ShortCircuitReplicaCreator {
                       getPathInfo(inetSocketAddress, conf);
     }
     if (!pathInfo.getPathState().getUsableForShortCircuit()) {
-      if (LOG.isTraceEnabled()) {
-        LOG.trace(this + ": " + pathInfo + " is not " +
-            "usable for short circuit; giving up on BlockReaderLocal.");
-      }
+      PerformanceAdvisory.LOG.debug(this + ": " + pathInfo + " is not " +
+          "usable for short circuit; giving up on BlockReaderLocal.");
       return null;
     }
     ShortCircuitCache cache = clientContext.getShortCircuitCache();
@@ -404,8 +402,9 @@ public class BlockReaderFactory implements ShortCircuitReplicaCreator {
     }
     if (info.getReplica() == null) {
       if (LOG.isTraceEnabled()) {
-        LOG.trace(this + ": failed to get ShortCircuitReplica.  " +
-            "Cannot construct BlockReaderLocal via " + pathInfo.getPath());
+        PerformanceAdvisory.LOG.debug(this + ": failed to get " +
+            "ShortCircuitReplica. Cannot construct " +
+            "BlockReaderLocal via " + pathInfo.getPath());
       }
       return null;
     }
@@ -580,11 +579,9 @@ public class BlockReaderFactory implements ShortCircuitReplicaCreator {
                       getPathInfo(inetSocketAddress, conf);
     }
     if (!pathInfo.getPathState().getUsableForDataTransfer()) {
-      if (LOG.isTraceEnabled()) {
-        LOG.trace(this + ": not trying to create a remote block reader " +
-            "because the UNIX domain socket at " + pathInfo +
-            " is not usable.");
-      }
+      PerformanceAdvisory.LOG.debug(this + ": not trying to create a " +
+          "remote block reader because the UNIX domain socket at " +
+          pathInfo + " is not usable.");
       return null;
     }
     if (LOG.isTraceEnabled()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/225569ec/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/shortcircuit/DomainSocketFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/shortcircuit/DomainSocketFactory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/shortcircuit/DomainSocketFactory.java
index e067de7..5fd31a9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/shortcircuit/DomainSocketFactory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/shortcircuit/DomainSocketFactory.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.net.unix.DomainSocket;
 import com.google.common.base.Preconditions;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
+import org.apache.hadoop.util.PerformanceAdvisory;
 
 public class DomainSocketFactory {
   private static final Log LOG = LogFactory.getLog(DomainSocketFactory.class);
@@ -105,7 +106,8 @@ public class DomainSocketFactory {
     }
 
     if (feature == null) {
-      LOG.debug("Both short-circuit local reads and UNIX domain socket are disabled.");
+      PerformanceAdvisory.LOG.debug(
+          "Both short-circuit local reads and UNIX domain socket are disabled.");
     } else {
       if (conf.getDomainSocketPath().isEmpty()) {
         throw new HadoopIllegalArgumentException(feature + " is enabled but "

