hadoop-common-commits mailing list archives

From: z..@apache.org
Subject: hadoop git commit: Revert "HDFS-12502. nntop should support a category based on FilesInGetListingOps."
Date: Tue, 24 Oct 2017 22:05:40 GMT
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 0c8a2c260 -> 736fb3b66


Revert "HDFS-12502. nntop should support a category based on FilesInGetListingOps."

This reverts commit 86c2adc4528ffd27b744ee7e88109b86aaa3ec02.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/736fb3b6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/736fb3b6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/736fb3b6

Branch: refs/heads/branch-2
Commit: 736fb3b66c31062efd71a38f23362fa6a09d5097
Parents: 0c8a2c2
Author: Zhe Zhang <zhz@apache.org>
Authored: Tue Oct 24 15:05:26 2017 -0700
Committer: Zhe Zhang <zhz@apache.org>
Committed: Tue Oct 24 15:05:26 2017 -0700

----------------------------------------------------------------------
 .../hdfs/server/namenode/FSNamesystem.java      |  5 ----
 .../server/namenode/top/metrics/TopMetrics.java | 30 +++++---------------
 .../server/namenode/metrics/TestTopMetrics.java | 11 ++-----
 3 files changed, 10 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/736fb3b6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index eded007..e21da7f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -3619,11 +3619,6 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
       readUnlock(operationName);
     }
     logAuditEvent(true, operationName, src);
-    if (topConf.isEnabled && isAuditEnabled() && isExternalInvocation()
-        && dl != null && Server.getRemoteUser() != null) {
-      topMetrics.reportFilesInGetListing(Server.getRemoteUser().toString(),
-          dl.getPartialListing().length);
-    }
     return dl;
   }
 

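With this hook gone, a successful getListing only triggers the ordinary audit event; the size of the returned listing is no longer fed into nntop. Roughly, using only the TopMetrics methods visible in this diff (the file count of 1000 is illustrative, not taken from the patch):

    // Before the revert (HDFS-12502): one op count plus a weighted
    // "files in listing" report for a listing that returned 1000 entries.
    topMetrics.report(userName, "listStatus", 1);
    topMetrics.reportFilesInGetListing(userName, 1000);

    // After the revert: the listing counts as a single listStatus op,
    // regardless of how many files it returned.
    topMetrics.report(userName, "listStatus");
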
http://git-wip-us.apache.org/repos/asf/hadoop/blob/736fb3b6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/top/metrics/TopMetrics.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/top/metrics/TopMetrics.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/top/metrics/TopMetrics.java
index 3d8dd19..2719c88 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/top/metrics/TopMetrics.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/top/metrics/TopMetrics.java
@@ -70,14 +70,6 @@ public class TopMetrics implements MetricsSource {
   public static final Logger LOG = LoggerFactory.getLogger(TopMetrics.class);
   public static final String TOPMETRICS_METRICS_SOURCE_NAME =
       "NNTopUserOpCounts";
-  /**
-   * In addition to counts of different RPC calls, NNTop also reports top
-   * users listing large directories (measured by the number of files involved
-   * in listing operations from the user). This is important because the CPU
-   * and GC overhead of a listing operation grows linearly with the number of
-   * files involved. This category in NNTop is {@link #FILES_IN_GETLISTING}.
-   */
-  public static final String FILES_IN_GETLISTING = "filesInGetListing";
   private final boolean isMetricsSourceEnabled;
 
   private static void logConf(Configuration conf) {
@@ -131,30 +123,22 @@ public class TopMetrics implements MetricsSource {
   public void report(boolean succeeded, String userName, InetAddress addr,
       String cmd, String src, String dst, FileStatus status) {
     // currently nntop only makes use of the username and the command
-    report(userName, cmd, 1);
+    report(userName, cmd);
   }
 
-  public void reportFilesInGetListing(String userName, int numFiles) {
-    report(userName, FILES_IN_GETLISTING, numFiles);
-  }
-
-  public void report(String userName, String cmd, int delta) {
+  public void report(String userName, String cmd) {
     long currTime = Time.monotonicNow();
-    report(currTime, userName, cmd, delta);
+    report(currTime, userName, cmd);
   }
 
-  public void report(long currTime, String userName, String cmd, int delta) {
+  public void report(long currTime, String userName, String cmd) {
     LOG.debug("a metric is reported: cmd: {} user: {}", cmd, userName);
     userName = UserGroupInformation.trimLoginMethod(userName);
     for (RollingWindowManager rollingWindowManager : rollingWindowManagers
         .values()) {
-      rollingWindowManager.recordMetric(currTime, cmd, userName, delta);
-      // Increase the number of all RPC calls by the user, unless the report
-      // is for the number of files in a listing operation.
-      if (!cmd.equals(FILES_IN_GETLISTING)) {
-        rollingWindowManager.recordMetric(currTime,
-            TopConf.ALL_CMDS, userName, delta);
-      }
+      rollingWindowManager.recordMetric(currTime, cmd, userName, 1);
+      rollingWindowManager.recordMetric(currTime,
+          TopConf.ALL_CMDS, userName, 1);
     }
   }
 

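After the revert, every report(String, String) call bumps both the per-command rolling window and the TopConf.ALL_CMDS aggregate by exactly one; the FILES_IN_GETLISTING special case, which carried a variable delta and was excluded from ALL_CMDS, is removed along with its Javadoc. A minimal sketch of how a caller exercises the remaining API, assuming a Configuration and TopConf set up as in the test below (user and command names are placeholders):

    // Illustrative caller only; in the NameNode, TopMetrics is normally driven
    // by the audit logger via report(succeeded, userName, addr, cmd, ...).
    TopMetrics topMetrics = new TopMetrics(conf, topConf.nntopReportingPeriodsMs);

    // Each call increments the named op and ALL_CMDS by 1 in every window.
    topMetrics.report("alice", "listStatus");    // listStatus=1, ALL_CMDS=1 for alice
    topMetrics.report("alice", "getfileinfo");   // getfileinfo=1, ALL_CMDS=2 for alice
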
http://git-wip-us.apache.org/repos/asf/hadoop/blob/736fb3b6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestTopMetrics.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestTopMetrics.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestTopMetrics.java
index cda4b14..4d3a4f0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestTopMetrics.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestTopMetrics.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.metrics2.lib.Interns;
 import org.junit.Test;
 
 import static org.apache.hadoop.hdfs.server.namenode.top.metrics.TopMetrics.TOPMETRICS_METRICS_SOURCE_NAME;
-import static org.apache.hadoop.hdfs.server.namenode.top.metrics.TopMetrics.FILES_IN_GETLISTING;
 import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
@@ -42,10 +41,9 @@ public class TestTopMetrics {
     TopMetrics topMetrics = new TopMetrics(conf,
         topConf.nntopReportingPeriodsMs);
     // Dummy command
-    topMetrics.report("test", "listStatus", 1);
-    topMetrics.report("test", "listStatus", 1);
-    topMetrics.report("test", "listStatus", 1);
-    topMetrics.report("test", FILES_IN_GETLISTING, 1000);
+    topMetrics.report("test", "listStatus");
+    topMetrics.report("test", "listStatus");
+    topMetrics.report("test", "listStatus");
 
     MetricsRecordBuilder rb = getMetrics(topMetrics);
     MetricsCollector mc = rb.parent();
@@ -61,8 +59,5 @@ public class TestTopMetrics {
 
     verify(rb, times(3)).addCounter(Interns.info("op=listStatus." +
         "user=test.count", "Total operations performed by user"), 3L);
-
-    verify(rb, times(3)).addCounter(Interns.info("op=" + FILES_IN_GETLISTING +
-        ".user=test.count", "Total operations performed by user"), 1000L);
   }
 }



