hadoop-common-commits mailing list archives

From whe...@apache.org
Subject [21/50] [abbrv] hadoop git commit: HDFS-9379. Make NNThroughputBenchmark support more than 10 datanodes. (Contributed by Mingliang Liu)
Date Thu, 12 Nov 2015 18:22:33 GMT
HDFS-9379. Make NNThroughputBenchmark support more than 10 datanodes. (Contributed by Mingliang Liu)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2801b42a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2801b42a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2801b42a

Branch: refs/heads/HDFS-8707
Commit: 2801b42a7e178ad6a0e6b0f29f22f3571969c530
Parents: bf6aa30
Author: Arpit Agarwal <arp@apache.org>
Authored: Fri Nov 6 18:58:49 2015 -0800
Committer: Arpit Agarwal <arp@apache.org>
Committed: Fri Nov 6 18:58:49 2015 -0800

----------------------------------------------------------------------
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt                    | 3 +++
 .../hadoop/hdfs/server/namenode/NNThroughputBenchmark.java     | 6 +-----
 2 files changed, 4 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2801b42a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index f12a2a4..dbffc3f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1632,6 +1632,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9236. Missing sanity check for block size during block recovery.
     (Tony Wu via Yongjun Zhang)
 
+    HDFS-9379. Make NNThroughputBenchmark$BlockReportStats support more than 10
+    datanodes. (Mingliang Liu via Arpit Agarwal)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2801b42a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java
index b963d8f..6d27315 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java
@@ -1145,14 +1145,10 @@ public class NNThroughputBenchmark implements Tool {
       int nrFiles = (int)Math.ceil((double)nrBlocks / blocksPerFile);
       datanodes = new TinyDatanode[nrDatanodes];
       // create data-nodes
-      String prevDNName = "";
       for(int idx=0; idx < nrDatanodes; idx++) {
         datanodes[idx] = new TinyDatanode(idx, blocksPerReport);
         datanodes[idx].register();
-        assert datanodes[idx].getXferAddr().compareTo(prevDNName) > 0
-          : "Data-nodes must be sorted lexicographically.";
         datanodes[idx].sendHeartbeat();
-        prevDNName = datanodes[idx].getXferAddr();
       }
 
       // create files 
@@ -1184,7 +1180,7 @@ public class NNThroughputBenchmark implements Tool {
             prevBlock, null, HdfsConstants.GRANDFATHER_INODE_ID, null);
         prevBlock = loc.getBlock();
         for(DatanodeInfo dnInfo : loc.getLocations()) {
-          int dnIdx = Arrays.binarySearch(datanodes, dnInfo.getXferAddr());
+          int dnIdx = dnInfo.getXferPort() - 1;
           datanodes[dnIdx].addBlock(loc.getBlock().getLocalBlock());
           ReceivedDeletedBlockInfo[] rdBlocks = { new ReceivedDeletedBlockInfo(
               loc.getBlock().getLocalBlock(),


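For context (this note and the sketch below are not part of the commit): the removed assertion and the Arrays.binarySearch call both rely on the datanodes array being sorted lexicographically by transfer address. Once the synthetic ports reach two digits, lexicographic order no longer matches array order ("127.0.0.1:10" sorts before "127.0.0.1:2"), so the binary-search lookup can fail; the commit instead recovers the array index directly from the transfer port. Below is a minimal, self-contained Java sketch of the ordering problem, assuming (as the getXferPort() - 1 lookup in the diff suggests) that the datanode at array index idx registers with port idx + 1; the class name and loopback addresses are illustrative only.

// Illustration only -- not part of the commit. Shows why lexicographic
// ordering of transfer addresses diverges from array order once ports
// reach two digits, and how an index can be recovered from the port.
import java.util.Arrays;

public class XferAddrOrderingSketch {
  public static void main(String[] args) {
    int nrDatanodes = 12;                        // more than 10 datanodes
    String[] xferAddrs = new String[nrDatanodes];
    for (int idx = 0; idx < nrDatanodes; idx++) {
      // Assumption: the datanode at array index idx registers with port idx + 1,
      // mirroring the getXferPort() - 1 mapping in the diff above.
      xferAddrs[idx] = "127.0.0.1:" + (idx + 1);
    }

    // Index order is not lexicographic order: "127.0.0.1:10" < "127.0.0.1:2".
    String[] sorted = xferAddrs.clone();
    Arrays.sort(sorted);
    System.out.println(Arrays.equals(xferAddrs, sorted));   // false

    // A binary search over the index-ordered (unsorted) array is unreliable;
    // here it fails to find an address that is actually present.
    System.out.println(Arrays.binarySearch(xferAddrs, "127.0.0.1:11")); // negative

    // The commit's approach: map the transfer port straight back to the index.
    int xferPort = 11;                           // e.g. what DatanodeInfo reports
    int dnIdx = xferPort - 1;
    System.out.println(xferAddrs[dnIdx]);        // 127.0.0.1:11
  }
}

With only a handful of datanodes the ports stay single-digit, the two orderings coincide, and the old lookup happens to work, which is presumably why the assertion only tripped at larger benchmark scales.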