hadoop-hdfs-commits mailing list archives

From a..@apache.org
Subject svn commit: r1547462 - in /hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/server/datanode/ src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/ src/main/java/org/apache/hadoop/hdfs/s...
Date Tue, 03 Dec 2013 16:30:30 GMT
Author: arp
Date: Tue Dec  3 16:30:29 2013
New Revision: 1547462

URL: http://svn.apache.org/r1547462
Log:
HDFS-5484. StorageType and State in DatanodeStorageInfo in NameNode is not accurate. (Contributed by Eric Sirianni)
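
For context, a minimal sketch of the block-report path after this change, assembled from the BPServiceActor and FsVolumeImpl hunks below; "dn" and "blockPoolId" are stand-ins for the DataNode and block pool id that BPServiceActor already has in scope:

    // The dataset now keys each per-volume block list by a fully populated
    // DatanodeStorage (built by FsVolumeImpl.toDatanodeStorage() from the real
    // storage ID, State.NORMAL and StorageType), so no dummy DatanodeStorage
    // has to be constructed just for sending the block report.
    Map<DatanodeStorage, BlockListAsLongs> perVolumeBlockLists =
        dn.getFSDataset().getBlockReports(blockPoolId);

    StorageBlockReport[] reports =
        new StorageBlockReport[perVolumeBlockLists.size()];

    int i = 0;
    for (Map.Entry<DatanodeStorage, BlockListAsLongs> entry : perVolumeBlockLists.entrySet()) {
      reports[i++] = new StorageBlockReport(
          entry.getKey(), entry.getValue().getBlockListAsLongs());
    }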

Modified:
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsDatasetSpi.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestInjectionForSimulatedStorage.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockReport.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt?rev=1547462&r1=1547461&r2=1547462&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt Tue Dec  3 16:30:29 2013
@@ -120,3 +120,6 @@ IMPROVEMENTS:
 
     HDFS-5559. Fix TestDatanodeConfig in HDFS-2832. (Contributed by szetszwo)
 
+    HDFS-5484. StorageType and State in DatanodeStorageInfo in NameNode is
+    not accurate. (Eric Sirianni via Arpit Agarwal)
+

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java?rev=1547462&r1=1547461&r2=1547462&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java Tue Dec  3 16:30:29 2013
@@ -454,7 +454,7 @@ class BPServiceActor implements Runnable
       long brCreateStartTime = now();
       long totalBlockCount = 0;
 
-      Map<String, BlockListAsLongs> perVolumeBlockLists =
+      Map<DatanodeStorage, BlockListAsLongs> perVolumeBlockLists =
           dn.getFSDataset().getBlockReports(bpos.getBlockPoolId());
 
       // Send block report
@@ -463,13 +463,11 @@ class BPServiceActor implements Runnable
           new StorageBlockReport[perVolumeBlockLists.size()];
 
       int i = 0;
-      for(Map.Entry<String, BlockListAsLongs> kvPair : perVolumeBlockLists.entrySet()) {
-        String storageID = kvPair.getKey();
+      for(Map.Entry<DatanodeStorage, BlockListAsLongs> kvPair : perVolumeBlockLists.entrySet()) {
+        DatanodeStorage dnStorage = kvPair.getKey();
         BlockListAsLongs blockList = kvPair.getValue();
         totalBlockCount += blockList.getNumberOfBlocks();
 
-        // Dummy DatanodeStorage object just for sending the block report.
-        DatanodeStorage dnStorage = new DatanodeStorage(storageID);
         reports[i++] =
             new StorageBlockReport(
               dnStorage, blockList.getBlockListAsLongs());

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsDatasetSpi.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsDatasetSpi.java?rev=1547462&r1=1547461&r2=1547462&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsDatasetSpi.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsDatasetSpi.java Tue Dec  3 16:30:29 2013
@@ -40,6 +40,7 @@ import org.apache.hadoop.hdfs.server.dat
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetFactory;
 import org.apache.hadoop.hdfs.server.datanode.metrics.FSDatasetMBean;
 import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
+import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
@@ -268,9 +269,9 @@ public interface FsDatasetSpi<V extends 
   /**
    * Returns one block report per volume.
    * @param bpid Block Pool Id
-   * @return - a map of StorageID to block report for the volume.
+   * @return - a map of DatanodeStorage to block report for the volume.
    */
-  public Map<String, BlockListAsLongs> getBlockReports(String bpid);
+  public Map<DatanodeStorage, BlockListAsLongs> getBlockReports(String bpid);
 
   /**
    * Returns the cache report - the full list of cached block IDs of a

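A usage sketch of the revised contract, assuming DatanodeStorage exposes getStorageID() and getStorageType() accessors matching the constructor used in FsVolumeImpl.toDatanodeStorage() below; "dataset" and "bpid" are illustrative stand-ins:

    // Iterate the per-volume reports; the map key now identifies the storage
    // and its type, not just an opaque storage ID string.
    Map<DatanodeStorage, BlockListAsLongs> reports = dataset.getBlockReports(bpid);
    for (Map.Entry<DatanodeStorage, BlockListAsLongs> e : reports.entrySet()) {
      DatanodeStorage storage = e.getKey();
      System.out.println(storage.getStorageID() + " [" + storage.getStorageType()
          + "]: " + e.getValue().getNumberOfBlocks() + " blocks");
    }
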
Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java?rev=1547462&r1=1547461&r2=1547462&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java Tue Dec  3 16:30:29 2013
@@ -78,6 +78,7 @@ import org.apache.hadoop.hdfs.server.dat
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.VolumeChoosingPolicy;
 import org.apache.hadoop.hdfs.server.datanode.metrics.FSDatasetMBean;
 import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
+import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.io.nativeio.NativeIO;
@@ -1089,14 +1090,14 @@ class FsDatasetImpl implements FsDataset
   }
 
   @Override
-  public Map<String, BlockListAsLongs> getBlockReports(String bpid) {
-    Map<String, BlockListAsLongs> blockReportMap =
-        new HashMap<String, BlockListAsLongs>();
+  public Map<DatanodeStorage, BlockListAsLongs> getBlockReports(String bpid) {
+    Map<DatanodeStorage, BlockListAsLongs> blockReportMap =
+        new HashMap<DatanodeStorage, BlockListAsLongs>();
 
     for (FsVolumeImpl v : getVolumes()) {
       ReplicaMap rMap = perVolumeReplicaMap.get(v.getStorageID());
       BlockListAsLongs blockList = getBlockReportWithReplicaMap(bpid, rMap);
-      blockReportMap.put(v.getStorageID(), blockList);
+      blockReportMap.put(v.toDatanodeStorage(), blockList);
     }
 
     return blockReportMap;

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java?rev=1547462&r1=1547461&r2=1547462&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java Tue Dec  3 16:30:29 2013
@@ -38,6 +38,7 @@ import org.apache.hadoop.hdfs.StorageTyp
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.server.datanode.DataStorage;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
+import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -332,5 +333,10 @@ class FsVolumeImpl implements FsVolumeSp
   public StorageType getStorageType() {
     return storageType;
   }
+  
+  DatanodeStorage toDatanodeStorage() {
+    return new DatanodeStorage(storageID, DatanodeStorage.State.NORMAL, storageType);
+  }
+
 }
 

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java?rev=1547462&r1=1547461&r2=1547462&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java Tue Dec  3 16:30:29 2013
@@ -88,6 +88,7 @@ import org.apache.hadoop.hdfs.server.dat
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
+import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
 import org.apache.hadoop.hdfs.web.HftpFileSystem;
@@ -1970,7 +1971,7 @@ public class MiniDFSCluster {
   * @param dataNodeIndex - data node whose block report is desired - the index is same as for getDataNodes()
    * @return the block report for the specified data node
    */
-  public Map<String, BlockListAsLongs> getBlockReport(String bpid, int dataNodeIndex) {
+  public Map<DatanodeStorage, BlockListAsLongs> getBlockReport(String bpid, int dataNodeIndex) {
     if (dataNodeIndex < 0 || dataNodeIndex > dataNodes.size()) {
       throw new IndexOutOfBoundsException();
     }
@@ -1984,10 +1985,10 @@ public class MiniDFSCluster {
    * @return block reports from all data nodes
   *    BlockListAsLongs is indexed in the same order as the list of datanodes returned by getDataNodes()
    */
-  public List<Map<String, BlockListAsLongs>> getAllBlockReports(String bpid) {
+  public List<Map<DatanodeStorage, BlockListAsLongs>> getAllBlockReports(String bpid) {
     int numDataNodes = dataNodes.size();
-    final List<Map<String, BlockListAsLongs>> result
-        = new ArrayList<Map<String, BlockListAsLongs>>(numDataNodes);
+    final List<Map<DatanodeStorage, BlockListAsLongs>> result
+        = new ArrayList<Map<DatanodeStorage, BlockListAsLongs>>(numDataNodes);
     for (int i = 0; i < numDataNodes; ++i) {
       result.add(getBlockReport(bpid, i));
     }

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java?rev=1547462&r1=1547461&r2=1547462&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java Tue Dec  3 16:30:29 2013
@@ -39,6 +39,7 @@ import org.apache.hadoop.hdfs.protocol.B
 import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
+import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SequenceFile;
@@ -1394,11 +1395,11 @@ public class TestDFSShell {
     List<File> files = new ArrayList<File>();
     List<DataNode> datanodes = cluster.getDataNodes();
     String poolId = cluster.getNamesystem().getBlockPoolId();
-    List<Map<String, BlockListAsLongs>> blocks = cluster.getAllBlockReports(poolId);
+    List<Map<DatanodeStorage, BlockListAsLongs>> blocks = cluster.getAllBlockReports(poolId);
     for(int i = 0; i < blocks.size(); i++) {
       DataNode dn = datanodes.get(i);
-      Map<String, BlockListAsLongs> map = blocks.get(i);
-      for(Map.Entry<String, BlockListAsLongs> e : map.entrySet()) {
+      Map<DatanodeStorage, BlockListAsLongs> map = blocks.get(i);
+      for(Map.Entry<DatanodeStorage, BlockListAsLongs> e : map.entrySet()) {
         for(Block b : e.getValue()) {
           files.add(DataNodeTestUtils.getFile(dn, poolId, b.getBlockId()));
         }

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestInjectionForSimulatedStorage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestInjectionForSimulatedStorage.java?rev=1547462&r1=1547461&r2=1547462&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestInjectionForSimulatedStorage.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestInjectionForSimulatedStorage.java Tue Dec  3 16:30:29 2013
@@ -36,6 +36,7 @@ import org.apache.hadoop.hdfs.protocol.C
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
+import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 import org.apache.hadoop.util.Time;
 import org.junit.Test;
 
@@ -136,7 +137,7 @@ public class TestInjectionForSimulatedSt
       DFSTestUtil.createFile(cluster.getFileSystem(), testPath, filesize,
           filesize, blockSize, (short) numDataNodes, 0L);
       waitForBlockReplication(testFile, dfsClient.getNamenode(), numDataNodes, 20);
-      List<Map<String, BlockListAsLongs>> blocksList = cluster.getAllBlockReports(bpid);
+      List<Map<DatanodeStorage, BlockListAsLongs>> blocksList = cluster.getAllBlockReports(bpid);
       
       cluster.shutdown();
       cluster = null;
@@ -157,7 +158,7 @@ public class TestInjectionForSimulatedSt
                                   .build();
       cluster.waitActive();
       Set<Block> uniqueBlocks = new HashSet<Block>();
-      for(Map<String, BlockListAsLongs> map : blocksList) {
+      for(Map<DatanodeStorage, BlockListAsLongs> map : blocksList) {
         for(BlockListAsLongs blockList : map.values()) {
           for(Block b : blockList) {
             uniqueBlocks.add(new Block(b));

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java?rev=1547462&r1=1547461&r2=1547462&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java Tue Dec  3 16:30:29 2013
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
@@ -484,12 +485,9 @@ public class SimulatedFSDataset implemen
   }
 
   @Override
-  public synchronized Map<String, BlockListAsLongs> getBlockReports(
+  public synchronized Map<DatanodeStorage, BlockListAsLongs> getBlockReports(
       String bpid) {
-    Map<String, BlockListAsLongs> reports =
-        new HashMap<String, BlockListAsLongs>();
-    reports.put(storage.storageUuid, getBlockReport(bpid));
-    return reports;
+    return Collections.singletonMap(new DatanodeStorage(storage.storageUuid), getBlockReport(bpid));
   }
 
   @Override // FsDatasetSpi

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockReport.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockReport.java?rev=1547462&r1=1547461&r2=1547462&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockReport.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockReport.java Tue Dec  3 16:30:29 2013
@@ -120,7 +120,7 @@ public class TestBlockReport {
   private static StorageBlockReport[] getBlockReports(
       DataNode dn, String bpid, boolean corruptOneBlockGs,
       boolean corruptOneBlockLen) {
-    Map<String, BlockListAsLongs> perVolumeBlockLists =
+    Map<DatanodeStorage, BlockListAsLongs> perVolumeBlockLists =
         dn.getFSDataset().getBlockReports(bpid);
 
     // Send block report
@@ -130,8 +130,8 @@ public class TestBlockReport {
     boolean corruptedLen = false;
 
     int reportIndex = 0;
-    for(Map.Entry<String, BlockListAsLongs> kvPair : perVolumeBlockLists.entrySet()) {
-      String storageID = kvPair.getKey();
+    for(Map.Entry<DatanodeStorage, BlockListAsLongs> kvPair : perVolumeBlockLists.entrySet()) {
+      DatanodeStorage dnStorage = kvPair.getKey();
       BlockListAsLongs blockList = kvPair.getValue();
 
       // Walk the list of blocks until we find one each to corrupt the
@@ -150,8 +150,6 @@ public class TestBlockReport {
         }
       }
 
-      // Dummy DatanodeStorage object just for sending the block report.
-      DatanodeStorage dnStorage = new DatanodeStorage(storageID);
       reports[reportIndex++] =
           new StorageBlockReport(dnStorage, blockList.getBlockListAsLongs());
     }

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java?rev=1547462&r1=1547461&r2=1547462&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java Tue Dec  3 16:30:29 2013
@@ -154,7 +154,7 @@ public class TestDataNodeVolumeFailure {
     String bpid = cluster.getNamesystem().getBlockPoolId();
     DatanodeRegistration dnR = dn.getDNRegistrationForBP(bpid);
     
-    Map<String, BlockListAsLongs> perVolumeBlockLists =
+    Map<DatanodeStorage, BlockListAsLongs> perVolumeBlockLists =
         dn.getFSDataset().getBlockReports(bpid);
 
     // Send block report
@@ -162,10 +162,9 @@ public class TestDataNodeVolumeFailure {
         new StorageBlockReport[perVolumeBlockLists.size()];
 
     int reportIndex = 0;
-    for(Map.Entry<String, BlockListAsLongs> kvPair : perVolumeBlockLists.entrySet()) {
-        String storageID = kvPair.getKey();
+    for(Map.Entry<DatanodeStorage, BlockListAsLongs> kvPair : perVolumeBlockLists.entrySet()) {
+        DatanodeStorage dnStorage = kvPair.getKey();
         BlockListAsLongs blockList = kvPair.getValue();
-        DatanodeStorage dnStorage = new DatanodeStorage(storageID);
         reports[reportIndex++] =
             new StorageBlockReport(dnStorage, blockList.getBlockListAsLongs());
     }


