hadoop-hdfs-commits mailing list archives

From szets...@apache.org
Subject svn commit: r807423 - in /hadoop/hdfs/trunk: ./ src/java/org/apache/hadoop/hdfs/ src/java/org/apache/hadoop/hdfs/server/common/ src/java/org/apache/hadoop/hdfs/server/datanode/ src/test/aop/org/apache/hadoop/fi/ src/test/aop/org/apache/hadoop/hdfs/ src...
Date Mon, 24 Aug 2009 22:46:50 GMT
Author: szetszwo
Date: Mon Aug 24 22:46:50 2009
New Revision: 807423

URL: http://svn.apache.org/viewvc?rev=807423&view=rev
Log:
HDFS-561. Fix write pipeline READ_TIMEOUT in DataTransferProtocol.  Contributed by Kan Zhang
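
In short, the patch replaces two inconsistent read-timeout computations with one additive scheme built on a new shared constant, HdfsConstants.READ_TIMEOUT_EXTENSION: each writer in the pipeline waits the base socket timeout plus 5 seconds per downstream node, so the client and upstream datanodes always out-wait the nodes below them and a hung datanode gets blamed at the right position. A minimal, self-contained sketch of the arithmetic (the class and method here are illustrative, not part of the patch; the constant values match HdfsConstants in this commit):

    public class ReadTimeoutSketch {
      static final int READ_TIMEOUT = 60 * 1000;           // base timeout, ms
      static final int READ_TIMEOUT_EXTENSION = 5 * 1000;  // per downstream node, ms

      /** Read timeout used while setting up a write pipeline. */
      static int pipelineReadTimeout(int socketTimeout, int downstreamNodes) {
        return socketTimeout + READ_TIMEOUT_EXTENSION * downstreamNodes;
      }

      public static void main(String[] args) {
        // 3-node pipeline: the client sees 3 nodes, DN0 forwards to 2, DN1 to 1.
        System.out.println(pipelineReadTimeout(READ_TIMEOUT, 3)); // client: 75000
        System.out.println(pipelineReadTimeout(READ_TIMEOUT, 2)); // DN0:    70000
        System.out.println(pipelineReadTimeout(READ_TIMEOUT, 1)); // DN1:    65000
      }
    }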

Added:
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj
Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/HdfsConstants.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=807423&r1=807422&r2=807423&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Mon Aug 24 22:46:50 2009
@@ -170,6 +170,9 @@
     HDFS-532. Allow applications to know that a read request failed 
     because block is missing. (dhruba)
 
+    HDFS-561. Fix write pipeline READ_TIMEOUT in DataTransferProtocol.
+    (Kan Zhang via szetszwo)
+
 Release 0.20.1 - Unreleased
 
   IMPROVEMENTS

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java?rev=807423&r1=807422&r2=807423&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSClient.java Mon Aug 24 22:46:50 2009
@@ -2338,7 +2338,7 @@
     // it. When all the packets for a block are sent out and acks for each
    // of them are received, the DataStreamer closes the current block.
     //
-    private class DataStreamer extends Daemon {
+    class DataStreamer extends Daemon {
       private static final int MAX_RECOVERY_ERROR_COUNT = 5; // try block recovery 5 times
       private int recoveryErrorCount = 0; // number of times block recovery failed
       private volatile boolean streamerClosed = false;
@@ -2348,8 +2348,8 @@
       private DataInputStream blockReplyStream;
       private ResponseProcessor response = null;
       private volatile DatanodeInfo[] nodes = null; // list of targets for current block
-      private volatile boolean hasError = false;
-      private volatile int errorIndex = 0;
+      volatile boolean hasError = false;
+      volatile int errorIndex = 0;
   
       /*
        * streamer thread is the only thread that opens streams to datanode, 
@@ -2830,7 +2830,8 @@
           LOG.debug("Connecting to " + nodes[0].getName());
           InetSocketAddress target = NetUtils.createSocketAddr(nodes[0].getName());
           s = socketFactory.createSocket();
-          int timeoutValue = (socketTimeout > 0) ? (3000 * nodes.length + socketTimeout) : 0;
+          int timeoutValue = (socketTimeout > 0) ? (HdfsConstants.READ_TIMEOUT_EXTENSION
+              * nodes.length + socketTimeout) : 0;
           NetUtils.connect(s, target, timeoutValue);
           s.setSoTimeout(timeoutValue);
           s.setSendBufferSize(DEFAULT_DATA_SOCKET_SIZE);
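
Besides the timeout change, note the visibility relaxations above: DataStreamer and its hasError/errorIndex fields drop their private modifiers so the new DFSClientAspects.aj, which lives in the same org.apache.hadoop.hdfs package, can read pipeline state directly. A generic illustration of the language rule being relied on (these classes are illustrative, not HDFS code):

    // Package-private members are visible to any class (or aspect)
    // compiled into the same package, without needing getters.
    class Widget {
      volatile boolean hasError = false;  // package-private, like DataStreamer's
    }

    class Prober {
      boolean probe(Widget w) {
        return w.hasError;                // legal: Prober is in the same package
      }
    }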

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/HdfsConstants.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/HdfsConstants.java?rev=807423&r1=807422&r2=807423&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/HdfsConstants.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/HdfsConstants.java Mon Aug 24 22:46:50 2009
@@ -60,6 +60,7 @@
 
   // Timeouts for communicating with DataNode for streaming writes/reads
   public static int READ_TIMEOUT = 60 * 1000;
+  public static int READ_TIMEOUT_EXTENSION = 5 * 1000;
   public static int WRITE_TIMEOUT = 8 * 60 * 1000;
   public static int WRITE_TIMEOUT_EXTENSION = 5 * 1000; //for write pipeline
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java?rev=807423&r1=807422&r2=807423&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java Mon Aug 24 22:46:50 2009
@@ -267,7 +267,8 @@
         mirrorTarget = NetUtils.createSocketAddr(mirrorNode);
         mirrorSock = datanode.newSocket();
         try {
-          int timeoutValue = targets.length * datanode.socketTimeout;
+          int timeoutValue = datanode.socketTimeout
+              + (HdfsConstants.READ_TIMEOUT_EXTENSION * targets.length);
           int writeTimeout = datanode.socketWriteTimeout + 
                       (HdfsConstants.WRITE_TIMEOUT_EXTENSION * targets.length);
           NetUtils.connect(mirrorSock, mirrorTarget, timeoutValue);
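
The datanode side is where the actual fix lands: the old code multiplied the whole base timeout by the number of downstream targets, so a datanode early in a long pipeline could wait far longer than the client that was feeding it; the new code uses the same additive formula as DFSClient. A small before/after comparison (illustrative class; 60-second base timeout as in HdfsConstants):

    public class MirrorTimeoutSketch {
      static final int SOCKET_TIMEOUT = 60 * 1000;
      static final int READ_TIMEOUT_EXTENSION = 5 * 1000;

      public static void main(String[] args) {
        for (int targets = 1; targets <= 2; targets++) {
          int before = targets * SOCKET_TIMEOUT;                           // pre-patch
          int after = SOCKET_TIMEOUT + READ_TIMEOUT_EXTENSION * targets;   // post-patch
          System.out.println("targets=" + targets
              + ": old=" + before + " ms, new=" + after + " ms");
        }
        // targets=1: old=60000 ms,  new=65000 ms
        // targets=2: old=120000 ms, new=70000 ms
      }
    }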

Modified: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java?rev=807423&r1=807422&r2=807423&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java (original)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java Mon Aug 24 22:46:50 2009
@@ -52,6 +52,7 @@
    */
   public static class DataTransferTest implements PipelineTest {
     private List<Pipeline> pipelines = new ArrayList<Pipeline>();
+    private volatile boolean isSuccess = false;
 
     /** Simulate action for the receiverOpWriteBlock pointcut */
     public final ActionContainer<DatanodeID> fiReceiverOpWriteBlock
@@ -62,6 +63,22 @@
     /** Simulate action for the statusRead pointcut */
     public final ActionContainer<DatanodeID> fiStatusRead
         = new ActionContainer<DatanodeID>();
+    /** Verification action for the pipelineInitNonAppend pointcut */
+    public final ActionContainer<Integer> fiPipelineInitErrorNonAppend
+        = new ActionContainer<Integer>();
+    /** Verification action for the pipelineErrorAfterInit pointcut */
+    public final ActionContainer<Integer> fiPipelineErrorAfterInit
+        = new ActionContainer<Integer>();
+
+    /** Get test status */
+    public boolean isSuccess() {
+      return this.isSuccess;
+    }
+
+    /** Set test status */
+    public void markSuccess() {
+      this.isSuccess = true;
+    }
 
     /** Initialize the pipeline. */
     public Pipeline initPipeline(LocatedBlock lb) {
@@ -127,8 +144,9 @@
 
     @Override
     public void run(DatanodeID id) {
-      final Pipeline p = getPipelineTest().getPipeline(id);
-      if (p.contains(index, id)) {
+      final DataTransferTest test = getDataTransferTest();
+      final Pipeline p = test.getPipeline(id);
+      if (!test.isSuccess() && p.contains(index, id)) {
         final String s = toString(id);
         FiTestUtil.LOG.info(s);
         throw new OutOfMemoryError(s);
@@ -145,7 +163,8 @@
 
     @Override
     public void run(DatanodeID id) throws DiskOutOfSpaceException {
-      final Pipeline p = getPipelineTest().getPipeline(id);
+      final DataTransferTest test = getDataTransferTest();
+      final Pipeline p = test.getPipeline(id);
       if (p.contains(index, id)) {
         final String s = toString(id);
         FiTestUtil.LOG.info(s);
@@ -173,8 +192,9 @@
 
     @Override
     public void run(DatanodeID id) {
-      final Pipeline p = getPipelineTest().getPipeline(id);
-      if (p.contains(index, id)) {
+      final DataTransferTest test = getDataTransferTest();
+      final Pipeline p = test.getPipeline(id);
+      if (!test.isSuccess() && p.contains(index, id)) {
         final String s = toString(id) + ", duration=" + duration;
         FiTestUtil.LOG.info(s);
         if (duration <= 0) {
@@ -185,4 +205,36 @@
       }
     }
   }
+
+  /** Action for pipeline error verification */
+  public static class VerificationAction implements Action<Integer> {
+    /** The name of the test */
+    final String currentTest;
+    /** The error index of the datanode */
+    final int errorIndex;
+
+    /**
+     * Create a verification action for errors at datanode i in the pipeline.
+     * 
+     * @param currentTest The name of the test
+     * @param i The error index of the datanode
+     */
+    public VerificationAction(String currentTest, int i) {
+      this.currentTest = currentTest;
+      this.errorIndex = i;
+    }
+
+    /** {@inheritDoc} */
+    public String toString() {
+      return currentTest + ", errorIndex=" + errorIndex;
+    }
+
+    @Override
+    public void run(Integer i) {
+      if (i == errorIndex) {
+        FiTestUtil.LOG.info(this + ", successfully verified.");
+        getDataTransferTest().markSuccess();
+      }
+    }
+  }
 }
\ No newline at end of file
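
The isSuccess/markSuccess pair above implements a simple gate: fault actions (OomAction, SleepAction) keep firing on every pipeline attempt until a VerificationAction observes that the client blamed the expected datanode, after which retries are left alone so the write can complete. A standalone sketch of that pattern (names shortened; the real classes are in DataTransferTestUtil above):

    class FaultGate {
      private volatile boolean success = false;

      /** Verification side: the client reported the expected bad node. */
      void verify(int reportedIndex, int expectedIndex) {
        if (reportedIndex == expectedIndex) {
          success = true;               // stop injecting from now on
        }
      }

      /** Fault side: fail the target node only while still unverified. */
      void maybeFail(int nodeIndex, int targetIndex) {
        if (!success && nodeIndex == targetIndex) {
          throw new OutOfMemoryError("injected at datanode " + nodeIndex);
        }
      }
    }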

Added: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj?rev=807423&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj (added)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj Mon Aug 24 22:46:50 2009
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fi.DataTransferTestUtil;
+import org.apache.hadoop.hdfs.DFSClient.DFSOutputStream.DataStreamer;
+
+import org.junit.Assert;
+
+/** Aspects for DFSClient */
+public aspect DFSClientAspects {
+  public static final Log LOG = LogFactory.getLog(DFSClientAspects.class);
+
+  pointcut callCreateBlockOutputStream(DataStreamer datastreamer):
+    call(* createBlockOutputStream(..)) && target(datastreamer);
+
+  before(DataStreamer datastreamer) : callCreateBlockOutputStream(datastreamer) {
+    Assert.assertFalse(datastreamer.hasError);
+    Assert.assertEquals(0, datastreamer.errorIndex);
+  }
+
+  pointcut pipelineInitNonAppend(DataStreamer datastreamer):
+    callCreateBlockOutputStream(datastreamer) 
+    && cflow(execution(* nextBlockOutputStream(..)))
+    && within(DataStreamer);
+
+  after(DataStreamer datastreamer) returning : pipelineInitNonAppend(datastreamer) {
+    LOG.info("FI: after pipelineInitNonAppend: hasError="
+        + datastreamer.hasError + " errorIndex=" + datastreamer.errorIndex);
+    try {
+      if (datastreamer.hasError) {
+        DataTransferTestUtil.getDataTransferTest().fiPipelineInitErrorNonAppend
+            .run(datastreamer.errorIndex);
+      }
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  pointcut pipelineInitAppend(DataStreamer datastreamer):
+    callCreateBlockOutputStream(datastreamer) 
+    && cflow(execution(* initAppend(..)))
+    && within(DataStreamer);
+
+  after(DataStreamer datastreamer) returning : pipelineInitAppend(datastreamer) {
+    LOG.info("FI: after pipelineInitAppend: hasError=" + datastreamer.hasError
+        + " errorIndex=" + datastreamer.errorIndex);
+  }
+
+  pointcut pipelineErrorAfterInit(boolean onError, boolean isAppend,
+      DataStreamer datastreamer):
+    call(* processDatanodeError(boolean, boolean))
+    && args(onError, isAppend)
+    && target(datastreamer)
+    && if(onError && !isAppend);
+
+  before(DataStreamer datastreamer) : pipelineErrorAfterInit(boolean, boolean, datastreamer) {
+    LOG.info("FI: before pipelineErrorAfterInit: errorIndex="
+        + datastreamer.errorIndex);
+    try {
+      DataTransferTestUtil.getDataTransferTest().fiPipelineErrorAfterInit
+          .run(datastreamer.errorIndex);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+}
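
The interesting piece of this aspect is cflow(): pipelineInitNonAppend matches calls to createBlockOutputStream only when they occur during an execution of nextBlockOutputStream, which separates new-block pipeline setup from the append path matched by pipelineInitAppend. A generic AspectJ illustration of that semantics (not part of the patch):

    public aspect CflowSketch {
      // Matches calls to helper() only while outer() is on the call stack.
      after() returning :
          call(* Outer.helper(..)) && cflow(execution(* Outer.outer(..))) {
        System.out.println("helper() reached via outer()");
      }
    }

    class Outer {
      void outer() { helper(); }  // this call is advised
      void helper() { }           // a direct call from elsewhere is not
    }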

Modified: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java?rev=807423&r1=807422&r2=807423&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java (original)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java Mon Aug 24 22:46:50 2009
@@ -26,6 +26,7 @@
 import org.apache.hadoop.fi.DataTransferTestUtil.DoosAction;
 import org.apache.hadoop.fi.DataTransferTestUtil.OomAction;
 import org.apache.hadoop.fi.DataTransferTestUtil.SleepAction;
+import org.apache.hadoop.fi.DataTransferTestUtil.VerificationAction;
 import org.apache.hadoop.fi.FiTestUtil.Action;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -33,6 +34,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
+
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -45,6 +47,7 @@
   static {
     conf.setInt("dfs.datanode.handler.count", 1);
     conf.setInt("dfs.replication", REPLICATION);
+    conf.setInt("dfs.socket.timeout", 5000);
   }
 
   static private FSDataOutputStream createFile(FileSystem fs, Path p
@@ -63,8 +66,8 @@
   private static void write1byte(String methodName) throws IOException {
     final MiniDFSCluster cluster = new MiniDFSCluster(conf, REPLICATION, true,
         null);
+    final FileSystem dfs = cluster.getFileSystem();
     try {
-      final FileSystem dfs = cluster.getFileSystem();
       final Path p = new Path("/" + methodName + "/foo");
       final FSDataOutputStream out = createFile(dfs, p);
       out.write(1);
@@ -76,6 +79,7 @@
       Assert.assertEquals(1, b);
     }
     finally {
+      dfs.close();
       cluster.shutdown();
     }
   }
@@ -90,6 +94,93 @@
     write1byte(methodName);
   }
   
+  private static void runReceiverOpWriteBlockTest(String methodName,
+      int errorIndex, Action<DatanodeID> a) throws IOException {
+    FiTestUtil.LOG.info("Running " + methodName + " ...");
+    final DataTransferTest t = (DataTransferTest) DataTransferTestUtil
+        .initTest();
+    t.fiReceiverOpWriteBlock.set(a);
+    t.fiPipelineInitErrorNonAppend.set(new VerificationAction(methodName,
+        errorIndex));
+    write1byte(methodName);
+    Assert.assertTrue(t.isSuccess());
+  }
+  
+  private static void runStatusReadTest(String methodName, int errorIndex,
+      Action<DatanodeID> a) throws IOException {
+    FiTestUtil.LOG.info("Running " + methodName + " ...");
+    final DataTransferTest t = (DataTransferTest) DataTransferTestUtil
+        .initTest();
+    t.fiStatusRead.set(a);
+    t.fiPipelineInitErrorNonAppend.set(new VerificationAction(methodName,
+        errorIndex));
+    write1byte(methodName);
+    Assert.assertTrue(t.isSuccess());
+  }
+
+  private static void runCallReceivePacketTest(String methodName,
+      int errorIndex, Action<DatanodeID> a) throws IOException {
+    FiTestUtil.LOG.info("Running " + methodName + " ...");
+    final DataTransferTest t = (DataTransferTest)DataTransferTestUtil.initTest();
+    t.fiCallReceivePacket.set(a);
+    t.fiPipelineErrorAfterInit.set(new VerificationAction(methodName, errorIndex));
+    write1byte(methodName);
+    Assert.assertTrue(t.isSuccess());
+  }
+
+  /**
+   * Pipeline setup:
+   * DN0 never responds after receiving the setup request from the client.
+   * The client gets an IOException and determines DN0 is bad.
+   */
+  @Test
+  public void pipeline_Fi_01() throws IOException {
+    final String methodName = FiTestUtil.getMethodName();
+    runReceiverOpWriteBlockTest(methodName, 0, new SleepAction(methodName, 0, 0));
+  }
+
+  /**
+   * Pipeline setup:
+   * DN1 never responds after receiving the setup request from the client.
+   * The client gets an IOException and determines DN1 is bad.
+   */
+  @Test
+  public void pipeline_Fi_02() throws IOException {
+    final String methodName = FiTestUtil.getMethodName();
+    runReceiverOpWriteBlockTest(methodName, 1, new SleepAction(methodName, 1, 0));
+  }
+
+  /**
+   * Pipeline setup:
+   * DN2 never responds after receiving the setup request from the client.
+   * The client gets an IOException and determines DN2 is bad.
+   */
+  @Test
+  public void pipeline_Fi_03() throws IOException {
+    final String methodName = FiTestUtil.getMethodName();
+    runReceiverOpWriteBlockTest(methodName, 2, new SleepAction(methodName, 2, 0));
+  }
+
+  /**
+   * Pipeline setup, DN1 never responds after receiving the setup ack from DN2.
+   * The client gets an IOException and determines DN1 is bad.
+   */
+  @Test
+  public void pipeline_Fi_04() throws IOException {
+    final String methodName = FiTestUtil.getMethodName();
+    runStatusReadTest(methodName, 1, new SleepAction(methodName, 1, 0));
+  }
+
+  /**
+   * Pipeline setup, DN0 never responds after receiving the setup ack from DN1.
+   * The client gets an IOException and determines DN0 is bad.
+   */
+  @Test
+  public void pipeline_Fi_05() throws IOException {
+    final String methodName = FiTestUtil.getMethodName();
+    runStatusReadTest(methodName, 0, new SleepAction(methodName, 0, 0));
+  }
+
   /**
    * Pipeline setup with DN0 very slow but it won't lead to timeout.
    * Client finishes setup successfully.
@@ -120,18 +211,37 @@
     runSlowDatanodeTest(methodName, new SleepAction(methodName, 2, 3000));
   }
 
-  private static void runCallReceivePacketTest(String methodName,
-      Action<DatanodeID> a) throws IOException {
-    FiTestUtil.LOG.info("Running " + methodName + " ...");
-    ((DataTransferTest)DataTransferTestUtil.initTest()).fiCallReceivePacket.set(a);
-    write1byte(methodName);
+  /**
+   * Pipeline setup, DN0 throws an OutOfMemoryError right after it
+   * receives the setup request from the client.
+   * The client gets an IOException and determines DN0 is bad.
+   */
+  @Test
+  public void pipeline_Fi_09() throws IOException {
+    final String methodName = FiTestUtil.getMethodName();
+    runReceiverOpWriteBlockTest(methodName, 0, new OomAction(methodName, 0));
   }
 
-  private static void runStatusReadTest(String methodName, Action<DatanodeID> a
-      ) throws IOException {
-    FiTestUtil.LOG.info("Running " + methodName + " ...");
-    ((DataTransferTest)DataTransferTestUtil.initTest()).fiStatusRead.set(a);
-    write1byte(methodName);
+  /**
+   * Pipeline setup, DN1 throws an OutOfMemoryError right after it
+   * receives the setup request from DN0.
+   * The client gets an IOException and determines DN1 is bad.
+   */
+  @Test
+  public void pipeline_Fi_10() throws IOException {
+    final String methodName = FiTestUtil.getMethodName();
+    runReceiverOpWriteBlockTest(methodName, 1, new OomAction(methodName, 1));
+  }
+
+  /**
+   * Pipeline setup, DN2 throws an OutOfMemoryError right after it
+   * receives the setup request from DN1.
+   * The client gets an IOException and determines DN2 is bad.
+   */
+  @Test
+  public void pipeline_Fi_11() throws IOException {
+    final String methodName = FiTestUtil.getMethodName();
+    runReceiverOpWriteBlockTest(methodName, 2, new OomAction(methodName, 2));
   }
 
   /**
@@ -142,7 +252,18 @@
   @Test
   public void pipeline_Fi_12() throws IOException {
     final String methodName = FiTestUtil.getMethodName();
-    runStatusReadTest(methodName, new OomAction(methodName, 1));
+    runStatusReadTest(methodName, 1, new OomAction(methodName, 1));
+  }
+
+  /**
+   * Pipeline setup, DN0 throws an OutOfMemoryError right after it
+   * receives the setup ack from DN1.
+   * The client gets an IOException and determines DN0 is bad.
+   */
+  @Test
+  public void pipeline_Fi_13() throws IOException {
+    final String methodName = FiTestUtil.getMethodName();
+    runStatusReadTest(methodName, 0, new OomAction(methodName, 0));
   }
 
   /**
@@ -153,7 +274,7 @@
   @Test
   public void pipeline_Fi_14() throws IOException {
     final String methodName = FiTestUtil.getMethodName();
-    runCallReceivePacketTest(methodName, new DoosAction(methodName, 0));
+    runCallReceivePacketTest(methodName, 0, new DoosAction(methodName, 0));
   }
 
   /**
@@ -164,9 +285,9 @@
   @Test
   public void pipeline_Fi_15() throws IOException {
     final String methodName = FiTestUtil.getMethodName();
-    runCallReceivePacketTest(methodName, new DoosAction(methodName, 1));
+    runCallReceivePacketTest(methodName, 1, new DoosAction(methodName, 1));
   }
-
+  
   /**
    * Streaming: Write a packet, DN2 throws a DiskOutOfSpaceError
    * when it writes the data to disk.
@@ -175,6 +296,6 @@
   @Test
   public void pipeline_Fi_16() throws IOException {
     final String methodName = FiTestUtil.getMethodName();
-    runCallReceivePacketTest(methodName, new DoosAction(methodName, 2));
+    runCallReceivePacketTest(methodName, 2, new DoosAction(methodName, 2));
   }
 }
\ No newline at end of file
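
A final detail worth calling out: the test configuration sets dfs.socket.timeout to 5000 ms so the "datanode never responds" cases fail fast. With the patch's formula, the client-side pipeline-setup timeout for the 3-node test pipeline works out to:

    // Client-side pipeline-setup timeout, 3-node pipeline:
    //   test config (5 s base):   5000 + 5000 * 3 = 20000 ms
    //   default (60 s base):     60000 + 5000 * 3 = 75000 ms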


