hadoop-hdfs-commits mailing list archives

From: a..@apache.org
Subject: svn commit: r1538145 - in /hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/ src/test/java/org/apache/hadoop/hdfs/server/common/
Date: Sat, 02 Nov 2013 05:21:48 GMT
Author: arp
Date: Sat Nov  2 05:21:48 2013
New Revision: 1538145

URL: http://svn.apache.org/r1538145
Log:
HDFS-5447. Fix TestJspHelper in branch HDFS-2832.

Modified:
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt?rev=1538145&r1=1538144&r2=1538145&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt Sat Nov  2 05:21:48 2013
@@ -57,3 +57,6 @@ IMPROVEMENTS:
 
     HDFS-5437. Fix TestBlockReport and TestBPOfferService failures. (Arpit
     Agarwal)
+
+    HDFS-5447. Fix TestJspHelper. (Arpit Agarwal)
+

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java?rev=1538145&r1=1538144&r2=1538145&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java Sat Nov  2 05:21:48 2013
@@ -43,9 +43,6 @@ import org.apache.hadoop.hdfs.util.Light
 import org.apache.hadoop.util.IntrusiveCollection;
 import org.apache.hadoop.util.Time;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-
 /**
  * This class extends the DatanodeInfo class with ephemeral information (eg
  * health, capacity, what blocks are associated with the Datanode) that is
@@ -218,26 +215,7 @@ public class DatanodeDescriptor extends 
    */
   public DatanodeDescriptor(DatanodeID nodeID, 
                             String networkLocation) {
-    this(nodeID, networkLocation, 0, 0, 0, 0);
-  }
-
-  /**
-   * DatanodeDescriptor constructor
-   * @param nodeID id of the data node
-   * @param networkLocation location of the data node in network
-   * @param cacheCapacity cache capacity of the data node
-   * @param cacheUsed cache used on the data node
-   * @param xceiverCount # of data transfers at the data node
-   */
-  public DatanodeDescriptor(DatanodeID nodeID,
-                            String networkLocation,
-                            long cacheCapacity,
-                            long cacheUsed,
-                            int xceiverCount,
-                            int failedVolumes) {
     super(nodeID, networkLocation);
-    updateHeartbeat(StorageReport.EMPTY_ARRAY, cacheCapacity, cacheUsed,
-      xceiverCount, failedVolumes);
   }
 
   /**
@@ -638,7 +616,8 @@ public class DatanodeDescriptor extends 
     return sb.toString();
   }
 
-  DatanodeStorageInfo updateStorage(DatanodeStorage s) {
+  @VisibleForTesting
+  public DatanodeStorageInfo updateStorage(DatanodeStorage s) {
     synchronized (storageMap) {
       DatanodeStorageInfo storage = storageMap.get(s.getStorageID());
       if (storage == null) {
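
For reference, after this change the capacity, cache, and xceiver figures can no longer be supplied through the DatanodeDescriptor constructor; callers attach a DatanodeStorage and then report usage via updateHeartbeat, with updateStorage now public and marked @VisibleForTesting. Below is a minimal sketch of the new pattern, modeled on the TestJspHelper change that follows; the DatanodeID values, the "dnStorage1" identifier, and the helper name are illustrative, not part of this commit.

    import org.apache.hadoop.hdfs.protocol.DatanodeID;
    import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
    import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
    import org.apache.hadoop.hdfs.server.protocol.StorageReport;

    // Illustrative helper: builds a descriptor the way the updated
    // TestJspHelper does.
    static DatanodeDescriptor buildTestDescriptor() {
      DatanodeID dnId = new DatanodeID("127.0.0.1", "localhost1", "datanode1",
          1234, 2345, 3456, 4567);

      // Capacity/usage figures no longer go through the constructor.
      DatanodeDescriptor dnDesc = new DatanodeDescriptor(dnId, "rack1");

      // Attach a storage first, then report its usage through a heartbeat.
      dnDesc.updateStorage(new DatanodeStorage("dnStorage1"));
      StorageReport[] reports = new StorageReport[] {
          new StorageReport("dnStorage1", false, 1024, 100, 924, 100)
      };
      dnDesc.updateHeartbeat(reports, 5L, 3L, 10, 2);
      return dnDesc;
    }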

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java?rev=1538145&r1=1538144&r2=1538145&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java Sat Nov  2 05:21:48 2013
@@ -25,6 +25,8 @@ import org.apache.hadoop.hdfs.protocol.D
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
+import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
+import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.io.DataInputBuffer;
@@ -451,10 +453,24 @@ public class TestJspHelper {
         1234, 2345, 3456, 4567);
     DatanodeID dnId2 = new DatanodeID("127.0.0.2", "localhost2", "datanode2",
         1235, 2346, 3457, 4568);
-    DatanodeDescriptor dnDesc1 = new DatanodeDescriptor(
-        dnId1, "rack1", 5l, 3l, 10, 2);
-    DatanodeDescriptor dnDesc2 = new DatanodeDescriptor(
-        dnId2, "rack2", 10l, 2l, 20, 1);
+
+    // Setup DatanodeDescriptors with one storage each.
+    DatanodeDescriptor dnDesc1 = new DatanodeDescriptor(dnId1, "rack1");
+    DatanodeDescriptor dnDesc2 = new DatanodeDescriptor(dnId2, "rack2");
+
+    // Update the DatanodeDescriptors with their attached storages.
+    dnDesc1.updateStorage(new DatanodeStorage("dnStorage1"));
+    dnDesc2.updateStorage(new DatanodeStorage("dnStorage2"));
+
+    StorageReport[] report1 = new StorageReport[] {
+        new StorageReport("dnStorage1", false, 1024, 100, 924, 100)
+    };
+    StorageReport[] report2 = new StorageReport[] {
+        new StorageReport("dnStorage2", false, 2500, 200, 1848, 200)
+    };
+    dnDesc1.updateHeartbeat(report1, 5l, 3l, 10, 2);
+    dnDesc2.updateHeartbeat(report2, 10l, 2l, 20, 1);
+
     ArrayList<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();
     live.add(dnDesc1);
     live.add(dnDesc2);


