hadoop-hdfs-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From szets...@apache.org
Subject svn commit: r805508 - in /hadoop/hdfs/trunk: ./ src/test/aop/org/apache/hadoop/fi/ src/test/aop/org/apache/hadoop/hdfs/protocol/ src/test/aop/org/apache/hadoop/hdfs/server/datanode/
Date Tue, 18 Aug 2009 17:41:22 GMT
Author: szetszwo
Date: Tue Aug 18 17:41:22 2009
New Revision: 805508

URL: http://svn.apache.org/viewvc?rev=805508&view=rev
Log:
HDFS-539. Refactor fault injection pipeline test util for future reuse.  Contributed by Konstantin
Boudnik

Added:
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/Pipeline.java
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/PipelineTest.java
Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/protocol/ClientProtocolAspects.aj
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/DataTransferProtocolAspects.aj
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=805508&r1=805507&r2=805508&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Tue Aug 18 17:41:22 2009
@@ -47,7 +47,7 @@
     via szetszwo)
 
     HDFS-493. Change build.xml so that the fault-injected tests are executed
-    only by the run-test-*-faul-inject targets.  (Konstantin Boudnik via
+    only by the run-test-*-fault-inject targets.  (Konstantin Boudnik via
     szetszwo)
 
     HDFS-446. Improvements to Offline Image Viewer. (Jakob Homan via shv)
@@ -94,11 +94,14 @@
 
     HDFS-546. DatanodeDescriptor iterator blocks as BlockInfo. (shv)
 
-    HDFS-457. Do not shutdown datanode if some, but not all, volumns fail.
+    HDFS-457. Do not shutdown datanode if some, but not all, volumes fail.
     (Boris Shkolnik via szetszwo)
 
     HDFS-548. TestFsck takes nearly 10 minutes to run. (hairong)
 
+    HDFS-539. Refactor fault injeciton pipeline test util for future reuse.
+    (Konstantin Boudnik via szetszwo)
+
   BUG FIXES
 
     HDFS-76. Better error message to users when commands fail because of 
@@ -111,7 +114,7 @@
     HADOOP-6096. Fix Eclipse project and classpath files following project
     split. (tomwhite)
 
-    HDFS-195. Handle expired tokens when write pipeline is restablished.
+    HDFS-195. Handle expired tokens when write pipeline is reestablished.
     (Kan Zhang via rangadi)
 
     HDFS-181. Validate src path in FSNamesystem.getFileInfo(..).  (Todd

Modified: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java?rev=805508&r1=805507&r2=805508&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java (original)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java Tue Aug
18 17:41:22 2009
@@ -17,88 +17,80 @@
  */
 package org.apache.hadoop.fi;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.hadoop.fi.FiTestUtil.Action;
 import org.apache.hadoop.fi.FiTestUtil.ActionContainer;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
-import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
 
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * Utilities for DataTransferProtocol related tests,
  * e.g. TestFiDataTransferProtocol.
  */
 public class DataTransferTestUtil {
-  private static DataTransferTest thepipelinetest;
+  protected static PipelineTest thepipelinetest;
   /** initialize pipeline test */
-  public static DataTransferTest initTest() {
+  public static PipelineTest initTest() {
     return thepipelinetest = new DataTransferTest();
   }
   /** get the pipeline test object */
-  public static DataTransferTest getPipelineTest() {
+  public static PipelineTest getPipelineTest() {
     return thepipelinetest;
   }
+  /** get the pipeline test object cast to DataTransferTest */
+  public static DataTransferTest getDataTransferTest() {
+    return (DataTransferTest)getPipelineTest();
+  }
 
   /**
    * The DataTransferTest class includes a pipeline
    * and some actions.
    */
-  public static class DataTransferTest {
-    private Pipeline thepipeline;
+  public static class DataTransferTest implements PipelineTest {
+    private List<Pipeline> pipelines = new ArrayList<Pipeline>();
+
     /** Simulate action for the receiverOpWriteBlock pointcut */
-    public final ActionContainer<DataNode> fiReceiverOpWriteBlock
-        = new ActionContainer<DataNode>();
+    public final ActionContainer<DatanodeID> fiReceiverOpWriteBlock
+        = new ActionContainer<DatanodeID>();
     /** Simulate action for the callReceivePacket pointcut */
-    public final ActionContainer<DataNode> fiCallReceivePacket
-        = new ActionContainer<DataNode>();
+    public final ActionContainer<DatanodeID> fiCallReceivePacket
+        = new ActionContainer<DatanodeID>();
     /** Simulate action for the statusRead pointcut */
-    public final ActionContainer<DataNode> fiStatusRead
-        = new ActionContainer<DataNode>();
+    public final ActionContainer<DatanodeID> fiStatusRead
+        = new ActionContainer<DatanodeID>();
 
     /** Initialize the pipeline. */
     public Pipeline initPipeline(LocatedBlock lb) {
-      if (thepipeline != null) {
+      final Pipeline pl = new Pipeline(lb);
+      if (pipelines.contains(pl)) {
         throw new IllegalStateException("thepipeline != null");
       }
-      return thepipeline = new Pipeline(lb);
+      pipelines.add(pl);
+      return pl;
     }
 
     /** Return the pipeline. */
-    public Pipeline getPipeline() {
-      if (thepipeline == null) {
+    public Pipeline getPipeline(DatanodeID id) {
+      if (pipelines == null) {
         throw new IllegalStateException("thepipeline == null");
       }
-      return thepipeline;
-    }
-  }
-
-  /** A pipeline contains a list of datanodes. */
-  public static class Pipeline {
-    private final List<String> datanodes = new ArrayList<String>();
-    
-    private Pipeline(LocatedBlock lb) {
-      for(DatanodeInfo d : lb.getLocations()) {
-        datanodes.add(d.getName());
+      StringBuilder dnString = new StringBuilder();
+      for (Pipeline pipeline : pipelines) {
+        for (DatanodeInfo dni : pipeline.getDataNodes())
+          dnString.append(dni.getStorageID());
+        if (dnString.toString().contains(id.getStorageID()))
+          return pipeline;
       }
-    }
-
-    /** Does the pipeline contains d at the n th position? */
-    public boolean contains(int n, DatanodeID d) {
-      return d.getName().equals(datanodes.get(n));
-    }
-
-    /** {@inheritDoc} */
-    public String toString() {
-      return getClass().getSimpleName() + datanodes;
+      return null;
     }
   }
 
   /** Action for DataNode */
-  public static abstract class DataNodeAction implements Action<DataNode> {
+  public static abstract class DataNodeAction implements Action<DatanodeID> {
     /** The name of the test */
     final String currentTest;
     /** The index of the datanode */
@@ -108,7 +100,7 @@
      * @param currentTest The name of the test
      * @param index The index of the datanode
      */
-    private DataNodeAction(String currentTest, int index) {
+    protected DataNodeAction(String currentTest, int index) {
       this.currentTest = currentTest;
       this.index = index;
     }
@@ -118,10 +110,11 @@
       return currentTest + ", index=" + index;
     }
 
-    /** {@inheritDoc} */
-    String toString(DataNode datanode) {
+    /** {@inheritDoc}
+     * @param datanodeID*/
+    String toString(DatanodeID datanodeID) {
       return "FI: " + this + ", datanode="
-          + datanode.getDatanodeRegistration().getName();
+          + datanodeID.getName();
     }
   }
 
@@ -133,10 +126,10 @@
     }
 
     @Override
-    public void run(DataNode datanode) {
-      final Pipeline p = getPipelineTest().getPipeline();
-      if (p.contains(index, datanode.getDatanodeRegistration())) {
-        final String s = toString(datanode);
+    public void run(DatanodeID id) {
+      final Pipeline p = getPipelineTest().getPipeline(id);
+      if (p.contains(index, id)) {
+        final String s = toString(id);
         FiTestUtil.LOG.info(s);
         throw new OutOfMemoryError(s);
       }
@@ -151,10 +144,10 @@
     }
 
     @Override
-    public void run(DataNode datanode) throws DiskOutOfSpaceException {
-      final Pipeline p = getPipelineTest().getPipeline();
-      if (p.contains(index, datanode.getDatanodeRegistration())) {
-        final String s = toString(datanode);
+    public void run(DatanodeID id) throws DiskOutOfSpaceException {
+      final Pipeline p = getPipelineTest().getPipeline(id);
+      if (p.contains(index, id)) {
+        final String s = toString(id);
         FiTestUtil.LOG.info(s);
         throw new DiskOutOfSpaceException(s);
       }
@@ -179,10 +172,10 @@
     }
 
     @Override
-    public void run(DataNode datanode) {
-      final Pipeline p = getPipelineTest().getPipeline();
-      if (p.contains(index, datanode.getDatanodeRegistration())) {
-        final String s = toString(datanode) + ", duration=" + duration;
+    public void run(DatanodeID id) {
+      final Pipeline p = getPipelineTest().getPipeline(id);
+      if (p.contains(index, id)) {
+        final String s = toString(id) + ", duration=" + duration;
         FiTestUtil.LOG.info(s);
         if (duration <= 0) {
           for(; true; FiTestUtil.sleep(1000)); //sleep forever

Added: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/Pipeline.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/Pipeline.java?rev=805508&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/Pipeline.java (added)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/Pipeline.java Tue Aug 18 17:41:22
2009
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fi;
+
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.DatanodeID;
+
+import java.util.List;
+import java.util.ArrayList;
+
+public class Pipeline {
+  private final List<String> datanodes = new ArrayList<String>();
+  private DatanodeInfo[] nodes;
+
+  Pipeline(LocatedBlock lb) {
+    for(DatanodeInfo d : lb.getLocations()) {
+      datanodes.add(d.getName());
+    }
+    nodes = lb.getLocations();
+  }
+
+  /** Does the pipeline contains d at the n th position? */
+  public boolean contains(int n, DatanodeID d) {
+    return d.getName().equals(datanodes.get(n));
+  }
+
+  /** Returns DatanodeInfo[] of the nodes of the constructed pipiline*/
+  public DatanodeInfo[] getDataNodes () {
+    return nodes;
+  }
+
+  /** {@inheritDoc} */
+  public String toString() {
+    return getClass().getSimpleName() + datanodes;
+  }
+}

Added: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/PipelineTest.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/PipelineTest.java?rev=805508&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/PipelineTest.java (added)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/PipelineTest.java Tue Aug 18 17:41:22
2009
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fi;
+
+import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+
+/** A pipeline contains a list of datanodes. */
+public interface PipelineTest {
+  public Pipeline initPipeline(LocatedBlock lb);
+  public Pipeline getPipeline(DatanodeID id);
+}

Modified: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/protocol/ClientProtocolAspects.aj
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/protocol/ClientProtocolAspects.aj?rev=805508&r1=805507&r2=805508&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/protocol/ClientProtocolAspects.aj
(original)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/protocol/ClientProtocolAspects.aj
Tue Aug 18 17:41:22 2009
@@ -20,6 +20,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fi.DataTransferTestUtil;
+import org.apache.hadoop.fi.PipelineTest;
 
 /** Aspect for ClientProtocol */
 public aspect ClientProtocolAspects {
@@ -29,7 +30,9 @@
     call(LocatedBlock ClientProtocol.addBlock(String, String));
 
   after() returning(LocatedBlock lb): addBlock() {
-    LOG.info("FI: addBlock "
-        + DataTransferTestUtil.getPipelineTest().initPipeline(lb));
+    PipelineTest pipelineTest = DataTransferTestUtil.getPipelineTest();
+    if (pipelineTest != null)
+      LOG.info("FI: addBlock "
+          + pipelineTest.initPipeline(lb));
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj?rev=805508&r1=805507&r2=805508&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
(original)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
Tue Aug 18 17:41:22 2009
@@ -22,6 +22,7 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fi.DataTransferTestUtil.DataTransferTest;
 import org.apache.hadoop.fi.DataTransferTestUtil;
 import org.apache.hadoop.fi.ProbabilityModel;
 import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
@@ -44,8 +45,10 @@
   before(BlockReceiver blockreceiver
       ) throws IOException : callReceivePacket(blockreceiver) {
     LOG.info("FI: callReceivePacket");
-    DataTransferTestUtil.getPipelineTest().fiCallReceivePacket.run(
-        blockreceiver.getDataNode());
+    DataTransferTest dtTest = DataTransferTestUtil.getDataTransferTest();
+    if (dtTest != null)
+      dtTest.fiCallReceivePacket.run(
+          blockreceiver.getDataNode().getDatanodeRegistration());
 
     if (ProbabilityModel.injectCriteria(BlockReceiver.class.getSimpleName())) {
       LOG.info("Before the injection point");

Modified: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/DataTransferProtocolAspects.aj
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/DataTransferProtocolAspects.aj?rev=805508&r1=805507&r2=805508&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/DataTransferProtocolAspects.aj
(original)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/DataTransferProtocolAspects.aj
Tue Aug 18 17:41:22 2009
@@ -24,6 +24,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fi.DataTransferTestUtil;
+import org.apache.hadoop.fi.DataTransferTestUtil.DataTransferTest;
 import org.apache.hadoop.hdfs.protocol.DataTransferProtocol.Op;
 import org.apache.hadoop.hdfs.protocol.DataTransferProtocol.Receiver;
 import org.apache.hadoop.hdfs.protocol.DataTransferProtocol.Status;
@@ -59,7 +60,8 @@
     final DataNode d = dataxceiver.getDataNode();
     LOG.info("FI: statusRead " + status + ", datanode="
         + d.getDatanodeRegistration().getName());    
-    DataTransferTestUtil.getPipelineTest().fiStatusRead.run(d);
+    DataTransferTestUtil.getDataTransferTest().fiStatusRead.run(
+        d.getDatanodeRegistration());
   }
 
   pointcut receiverOpWriteBlock(DataXceiver dataxceiver):
@@ -68,7 +70,9 @@
   before(DataXceiver dataxceiver
       ) throws IOException: receiverOpWriteBlock(dataxceiver) {
     LOG.info("FI: receiverOpWriteBlock");
-    DataTransferTestUtil.getPipelineTest().fiReceiverOpWriteBlock.run(
-        dataxceiver.getDataNode());
+    DataTransferTest dtTest = DataTransferTestUtil.getDataTransferTest();
+    if (dtTest != null)
+      dtTest.fiReceiverOpWriteBlock.run(
+          dataxceiver.getDataNode().getDatanodeRegistration());
   }
 }

Modified: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java?rev=805508&r1=805507&r2=805508&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java
(original)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java
Tue Aug 18 17:41:22 2009
@@ -31,6 +31,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.protocol.DatanodeID;
 
 /** Test DataTransferProtocol with fault injection. */
 public class TestFiDataTransferProtocol extends junit.framework.TestCase {
@@ -79,7 +80,7 @@
   private static void runSlowDatanodeTest(String methodName, SleepAction a
                   ) throws IOException {
     FiTestUtil.LOG.info("Running " + methodName + " ...");
-    final DataTransferTest t = DataTransferTestUtil.initTest();
+    final DataTransferTest t = (DataTransferTest)DataTransferTestUtil.initTest();
     t.fiCallReceivePacket.set(a);
     t.fiReceiverOpWriteBlock.set(a);
     t.fiStatusRead.set(a);
@@ -105,9 +106,9 @@
   }
 
   private static void runCallReceivePacketTest(String methodName,
-      Action<DataNode> a) throws IOException {
+      Action<DatanodeID> a) throws IOException {
     FiTestUtil.LOG.info("Running " + methodName + " ...");
-    DataTransferTestUtil.initTest().fiCallReceivePacket.set(a);
+    ((DataTransferTest)DataTransferTestUtil.initTest()).fiCallReceivePacket.set(a);
     write1byte(methodName);
   }
 



Mime
View raw message