hadoop-hdfs-commits mailing list archives

From: bor...@apache.org
Subject: svn commit: r1075989 - in /hadoop/hdfs/branches/HDFS-1052: ./ src/java/org/apache/hadoop/hdfs/server/datanode/ src/test/aop/org/apache/hadoop/hdfs/server/datanode/
Date: Tue, 01 Mar 2011 19:29:06 GMT
Author: boryas
Date: Tue Mar  1 19:29:06 2011
New Revision: 1075989

URL: http://svn.apache.org/viewvc?rev=1075989&view=rev
Log:
HDFS-1671. HDFS Federation: shutdown in DataNode should be able to shut down
individual BP threads as well as the whole DN

Modified:
    hadoop/hdfs/branches/HDFS-1052/CHANGES.txt
    hadoop/hdfs/branches/HDFS-1052/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
    hadoop/hdfs/branches/HDFS-1052/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
    hadoop/hdfs/branches/HDFS-1052/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
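
For context, the core of this change is a two-phase shutdown of the block pool (BP) service threads: every BPOfferService is interrupted first, and only then is each one joined, so one slow thread cannot delay the stop signal to the others. Below is a minimal, self-contained sketch of that pattern; Worker is a hypothetical stand-in for BPOfferService, not the real class.

import java.util.ArrayList;
import java.util.List;

// Sketch only: two-phase shutdown of a set of worker threads, mirroring
// the shape of BlockPoolManager#shutDownAll in the patch
// (interrupt all first, then join all).
public class TwoPhaseShutdownSketch {

  static class Worker implements Runnable {
    private volatile boolean shouldRun = true;
    private final Thread thread = new Thread(this);

    void start() { thread.start(); }

    // Phase 1: signal the thread to stop without blocking the caller.
    void stop() {
      shouldRun = false;
      thread.interrupt();
    }

    // Phase 2: wait for the thread to actually exit.
    void join() {
      try {
        thread.join();
      } catch (InterruptedException ie) {
        // swallowed here, matching BPOfferService#join in the patch
      }
    }

    @Override
    public void run() {
      while (shouldRun) {
        try {
          Thread.sleep(1000); // placeholder for the real offer-service loop
        } catch (InterruptedException ie) {
          // interrupted by stop(); the loop condition ends the thread
        }
      }
    }
  }

  public static void main(String[] args) {
    List<Worker> workers = new ArrayList<Worker>();
    for (int i = 0; i < 3; i++) {
      Worker w = new Worker();
      w.start();
      workers.add(w);
    }
    for (Worker w : workers) {
      w.stop();   // interrupt every worker first...
    }
    for (Worker w : workers) {
      w.join();   // ...then wait for each one to finish
    }
  }
}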

Modified: hadoop/hdfs/branches/HDFS-1052/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1052/CHANGES.txt?rev=1075989&r1=1075988&r2=1075989&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1052/CHANGES.txt (original)
+++ hadoop/hdfs/branches/HDFS-1052/CHANGES.txt Tue Mar  1 19:29:06 2011
@@ -88,6 +88,9 @@ Trunk (unreleased changes)
     HDFS-1662. HDFS federation: fix unit test case, TestCheckpoint 
     and TestDataNodeMXBean (tanping via boryas)
 
+    HDFS-1671. HDFS Federation: shutdown in DataNode should be able to 
+    shutdown individual BP threads as well as the whole DN (boryas).
+
   IMPROVEMENTS
 
     HDFS-1510. Added test-patch.properties required by test-patch.sh (nigel)

Modified: hadoop/hdfs/branches/HDFS-1052/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1052/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=1075989&r1=1075988&r2=1075989&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1052/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/hdfs/branches/HDFS-1052/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java Tue Mar  1 19:29:06 2011
@@ -254,7 +254,11 @@ public class DataNode extends Configured
     void shutDownAll() throws InterruptedException {
       BPOfferService[] bposArray = this.getAllNamenodeThreads();
       for (BPOfferService bpos : bposArray) {
-        bpos.stop();
+        bpos.stop(); //interrupts the threads
+      }
+      //now join
+      for (BPOfferService bpos : bposArray) {
+        bpos.join();
       }
     }
     
@@ -300,6 +304,7 @@ public class DataNode extends Configured
 
         for (BPOfferService bpos : toShutdown) {
           bpos.stop();
+          bpos.join();
         }
         // Now start the threads that are not already running.
         startAll();
@@ -857,20 +862,17 @@ public class DataNode extends Configured
     void stop() {
       shouldServiceRun = false;
       if (bpThread != null) {
-        try {
           bpThread.interrupt();
-          bpThread.join();
-        } catch (InterruptedException ex) {
-          LOG.warn("Received exception: ", ex);
-        }
       }
     }
     
     //This must be called only by blockPoolManager
-    void join() throws InterruptedException {
-      if (bpThread != null) {
-        bpThread.join();
-      }
+    void join() {
+      try {
+        if (bpThread != null) {
+          bpThread.join();
+        }
+      } catch (InterruptedException ie) { }
     }
     
     //Cleanup method to be called by current thread before exiting.
@@ -1318,7 +1320,7 @@ public class DataNode extends Configured
     return datanodeId.ipcPort;
   }
   
-  DatanodeID getDataNodeId() {
+  DatanodeID getDatanodeId() {
     return datanodeId;
   }
   
@@ -1486,10 +1488,12 @@ public class DataNode extends Configured
       }
     }
     
-    try {
-      this.blockPoolManager.shutDownAll();
-    } catch (InterruptedException ie) {
-      LOG.warn("Received exception in BlockPoolManager#shutDownAll: ", ie);
+    if(blockPoolManager != null) {
+      try {
+        this.blockPoolManager.shutDownAll();
+      } catch (InterruptedException ie) {
+        LOG.warn("Received exception in BlockPoolManager#shutDownAll: ", ie);
+      }
     }
 
     if(upgradeManager != null)
@@ -1930,6 +1934,7 @@ public class DataNode extends Configured
     while (shouldRun) {
       try {
         blockPoolManager.joinAll();
+        Thread.sleep(2000);
       } catch (InterruptedException ex) {
         LOG.warn("Received exception in Datanode#join: " + ex);
       }
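
A note on the last hunk above: since BPOfferService#join no longer throws, DataNode#join becomes a polling loop that repeatedly calls blockPoolManager.joinAll() and then sleeps, presumably so the loop neither busy-spins nor misses BP threads that are added or restarted while the DataNode is still marked as running. A hedged sketch of that loop shape follows; the class and member names are illustrative, not the real DataNode fields.

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

// Illustrative sketch of a polling join loop like the one added to
// DataNode#join; "workers" stands in for blockPoolManager's BP threads.
public class PollingJoinSketch {
  private volatile boolean shouldRun = true;
  private final List<Thread> workers = new CopyOnWriteArrayList<Thread>();

  void register(Thread t) {
    workers.add(t);
  }

  void shutdown() {
    shouldRun = false;
  }

  // Wait for the workers registered right now; threads registered later
  // are picked up by the next pass of joinUntilShutdown().
  private void joinAll() throws InterruptedException {
    for (Thread t : workers) {
      t.join();
    }
  }

  void joinUntilShutdown() {
    while (shouldRun) {
      try {
        joinAll();            // wait for the current set of threads
        Thread.sleep(2000);   // then re-check, mirroring the 2s sleep in the patch
      } catch (InterruptedException ex) {
        // log and keep looping until shutdown() flips shouldRun to false
      }
    }
  }
}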

Modified: hadoop/hdfs/branches/HDFS-1052/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1052/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java?rev=1075989&r1=1075988&r2=1075989&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1052/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java (original)
+++ hadoop/hdfs/branches/HDFS-1052/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java Tue Mar  1 19:29:06 2011
@@ -185,7 +185,7 @@ public class DataStorage extends Storage
     
     // make sure we have storage id set - if not - generate new one
     if(storageID.isEmpty()) {
-      DataNode.setNewStorageID(DataNode.datanodeObject.getDataNodeId());
+      DataNode.setNewStorageID(DataNode.datanodeObject.getDatanodeId());
       storageID = DataNode.datanodeObject.getStorageId();
     }
     

Modified: hadoop/hdfs/branches/HDFS-1052/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1052/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj?rev=1075989&r1=1075988&r2=1075989&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1052/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj (original)
+++ hadoop/hdfs/branches/HDFS-1052/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj Tue Mar  1 19:29:06 2011
@@ -51,6 +51,7 @@ privileged public aspect BlockReceiverAs
   before(BlockReceiver blockreceiver
       ) throws IOException : callReceivePacket(blockreceiver) {
     final String dnName = blockreceiver.getDataNode().getMachineName();
+    final DatanodeID dnId = blockreceiver.getDataNode().getDatanodeId();
     LOG.info("FI: callReceivePacket, datanode=" + dnName);
     DataTransferTest dtTest = DataTransferTestUtil.getDataTransferTest();
     if (dtTest != null)
@@ -104,7 +105,7 @@ privileged public aspect BlockReceiverAs
     if (!(pTest instanceof PipelinesTest)) {
       return;
     }
-    NodeBytes nb = new NodeBytes(br.datanode.dnId, offset);
+    NodeBytes nb = new NodeBytes(br.datanode.getDatanodeId(), offset);
     try {
       ((PipelinesTest)pTest).fiCallSetNumBytes.run(nb);
     } catch (IOException e) {
@@ -134,7 +135,7 @@ privileged public aspect BlockReceiverAs
 
   private void bytesAckedService 
       (final PipelinesTest pTest, final PacketResponder pr, final long acked) {
-    NodeBytes nb = new NodeBytes(pr.receiver.datanode.dnId, acked);
+    NodeBytes nb = new NodeBytes(pr.receiver.datanode.getDatanodeId(), acked);
     try {
       pTest.fiCallSetBytesAcked.run(nb);
     } catch (IOException e) {
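
The aspect hunks above only switch from direct field access (br.datanode.dnId) to the renamed getDatanodeId() accessor; the underlying pattern is that the fault-injection advice packages a (datanode id, byte count) pair and hands it to an action registered by the pipelines test. Here is a rough sketch of that hook shape, with hypothetical names (NodeBytesSketch, FiAction) rather than the real DataTransferTestUtil/PipelinesTest types.

import java.io.IOException;

// Hypothetical sketch of the fault-injection hook shape used by these test
// aspects: the advice builds a small (datanode id, byte count) value object
// and passes it to a pluggable test action.
public class FaultInjectionHookSketch {

  // Analogous to new NodeBytes(datanode.getDatanodeId(), offset) in the diff;
  // a String id is used here instead of a DatanodeID to stay self-contained.
  static class NodeBytesSketch {
    final String datanodeId;
    final long bytes;
    NodeBytesSketch(String datanodeId, long bytes) {
      this.datanodeId = datanodeId;
      this.bytes = bytes;
    }
  }

  // The action a test registers; the advice fires it at the join point.
  interface FiAction {
    void run(NodeBytesSketch nb) throws IOException;
  }

  private FiAction fiCallSetNumBytes;

  void register(FiAction action) {
    fiCallSetNumBytes = action;
  }

  // Called from the advice; the real aspect's IOException handling is cut off
  // in the diff above, so a simple rethrow is assumed here.
  void fireSetNumBytes(String datanodeId, long offset) {
    if (fiCallSetNumBytes == null) {
      return; // no fault-injection test registered
    }
    try {
      fiCallSetNumBytes.run(new NodeBytesSketch(datanodeId, offset));
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}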


