hadoop-hdfs-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r1125608 - in /hadoop/hdfs/branches/yahoo-merge: ./ src/c++/libhdfs/ src/contrib/hdfsproxy/ src/java/ src/java/org/apache/hadoop/hdfs/server/datanode/ src/test/aop/org/apache/hadoop/hdfs/server/datanode/ src/test/hdfs/ src/webapps/datanode/...
Date: Sat, 21 May 2011 01:10:41 GMT
Author: szetszwo
Date: Sat May 21 01:10:40 2011
New Revision: 1125608

URL: http://svn.apache.org/viewvc?rev=1125608&view=rev
Log:
svn merge -c 1090357 from trunk for HDFS-1817.
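
For context, a single-revision cherry-pick of this kind is normally run from a working copy of the target branch. The commands below are an illustrative sketch only, not part of the recorded commit; the repository URL is assumed to be the standard ASF location:

    # Hypothetical reproduction of the merge recorded above, run from a
    # checkout of branches/yahoo-merge:
    svn merge -c 1090357 http://svn.apache.org/repos/asf/hadoop/hdfs/trunk .
    svn commit -m "svn merge -c 1090357 from trunk for HDFS-1817."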

Modified:
    hadoop/hdfs/branches/yahoo-merge/   (props changed)
    hadoop/hdfs/branches/yahoo-merge/CHANGES.txt
    hadoop/hdfs/branches/yahoo-merge/build.xml   (props changed)
    hadoop/hdfs/branches/yahoo-merge/src/c++/libhdfs/   (props changed)
    hadoop/hdfs/branches/yahoo-merge/src/contrib/hdfsproxy/   (props changed)
    hadoop/hdfs/branches/yahoo-merge/src/java/   (props changed)
    hadoop/hdfs/branches/yahoo-merge/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java   (props changed)
    hadoop/hdfs/branches/yahoo-merge/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java
    hadoop/hdfs/branches/yahoo-merge/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiPipelineClose.java
    hadoop/hdfs/branches/yahoo-merge/src/test/hdfs/   (props changed)
    hadoop/hdfs/branches/yahoo-merge/src/webapps/datanode/   (props changed)
    hadoop/hdfs/branches/yahoo-merge/src/webapps/hdfs/   (props changed)
    hadoop/hdfs/branches/yahoo-merge/src/webapps/secondary/   (props changed)

Propchange: hadoop/hdfs/branches/yahoo-merge/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat May 21 01:10:40 2011
@@ -2,4 +2,4 @@
 /hadoop/hdfs/branches/HDFS-1052:1078924,1078943,1080331,1080391,1080402,1081603,1082326,1084245,1086788,1090419
 /hadoop/hdfs/branches/HDFS-265:796829-820463
 /hadoop/hdfs/branches/branch-0.21:820487
-/hadoop/hdfs/trunk:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1036738,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576
+/hadoop/hdfs/trunk:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1036738,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1090357,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576

Modified: hadoop/hdfs/branches/yahoo-merge/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/yahoo-merge/CHANGES.txt?rev=1125608&r1=1125607&r2=1125608&view=diff
==============================================================================
--- hadoop/hdfs/branches/yahoo-merge/CHANGES.txt (original)
+++ hadoop/hdfs/branches/yahoo-merge/CHANGES.txt Sat May 21 01:10:40 2011
@@ -288,6 +288,9 @@ Trunk (unreleased changes)
     HDFS-1789. Refactor frequently used codes from DFSOutputStream and
     DataXceiver.  (szetszwo)
 
+    HDFS-1817. Move pipeline_Fi_[39-51] from TestFiDataTransferProtocol
+    to TestFiPipelineClose.  (szetszwo)
+
   OPTIMIZATIONS
 
   BUG FIXES

Propchange: hadoop/hdfs/branches/yahoo-merge/build.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat May 21 01:10:40 2011
@@ -3,4 +3,4 @@
 /hadoop/hdfs/branches/HDFS-1052/build.xml:1078924,1078943,1080331,1080391,1080402,1081603,1082326,1084245,1086788,1090419
 /hadoop/hdfs/branches/HDFS-265/build.xml:796829-820463
 /hadoop/hdfs/branches/branch-0.21/build.xml:820487
-/hadoop/hdfs/trunk/build.xml:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576
+/hadoop/hdfs/trunk/build.xml:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1090357,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576

Propchange: hadoop/hdfs/branches/yahoo-merge/src/c++/libhdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat May 21 01:10:40 2011
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/mapred/src/c++/libhdfs:713112
 /hadoop/core/trunk/src/c++/libhdfs:776175-784663
 /hadoop/hdfs/branches/HDFS-1052/src/c++/libhdfs:1078924,1078943,1080331,1080391,1080402,1081603,1082326,1084245,1086788,1090419
-/hadoop/hdfs/trunk/src/c++/libhdfs:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576
+/hadoop/hdfs/trunk/src/c++/libhdfs:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1090357,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576

Propchange: hadoop/hdfs/branches/yahoo-merge/src/contrib/hdfsproxy/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat May 21 01:10:40 2011
@@ -3,4 +3,4 @@
 /hadoop/hdfs/branches/HDFS-1052/src/contrib/hdfsproxy:1078924,1078943,1080331,1080391,1080402,1081603,1082326,1084245,1086788,1090419
 /hadoop/hdfs/branches/HDFS-265/src/contrib/hdfsproxy:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/contrib/hdfsproxy:820487
-/hadoop/hdfs/trunk/src/contrib/hdfsproxy:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576
+/hadoop/hdfs/trunk/src/contrib/hdfsproxy:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1090357,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576

Propchange: hadoop/hdfs/branches/yahoo-merge/src/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat May 21 01:10:40 2011
@@ -3,4 +3,4 @@
 /hadoop/hdfs/branches/HDFS-1052/src/java:1078924,1078943,1080331,1080391,1080402,1081603,1082326,1084245,1086788,1090419
 /hadoop/hdfs/branches/HDFS-265/src/java:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/java:820487
-/hadoop/hdfs/trunk/src/java:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576
+/hadoop/hdfs/trunk/src/java:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1090357,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576

Propchange: hadoop/hdfs/branches/yahoo-merge/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat May 21 01:10:40 2011
@@ -5,4 +5,4 @@
 /hadoop/hdfs/branches/HDFS-1052/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:1078924,1078943,1080331,1080391,1080402,1081603,1082326,1084245,1086788,1090419
 /hadoop/hdfs/branches/HDFS-265/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:820487
-/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576
+/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1090357,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576

Modified: hadoop/hdfs/branches/yahoo-merge/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/yahoo-merge/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java?rev=1125608&r1=1125607&r2=1125608&view=diff
==============================================================================
--- hadoop/hdfs/branches/yahoo-merge/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java (original)
+++ hadoop/hdfs/branches/yahoo-merge/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java Sat May 21 01:10:40 2011
@@ -19,21 +19,15 @@ package org.apache.hadoop.hdfs.server.da
 
 import java.io.IOException;
 
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fi.DataTransferTestUtil;
-import org.apache.hadoop.fi.FiTestUtil;
-import org.apache.hadoop.fi.DataTransferTestUtil.DataNodeAction;
 import org.apache.hadoop.fi.DataTransferTestUtil.DataTransferTest;
-import org.apache.hadoop.fi.DataTransferTestUtil.DatanodeMarkingAction;
 import org.apache.hadoop.fi.DataTransferTestUtil.DoosAction;
-import org.apache.hadoop.fi.DataTransferTestUtil.IoeAction;
 import org.apache.hadoop.fi.DataTransferTestUtil.OomAction;
 import org.apache.hadoop.fi.DataTransferTestUtil.SleepAction;
 import org.apache.hadoop.fi.DataTransferTestUtil.VerificationAction;
+import org.apache.hadoop.fi.FiTestUtil;
 import org.apache.hadoop.fi.FiTestUtil.Action;
-import org.apache.hadoop.fi.FiTestUtil.ConstraintSatisfactionAction;
-import org.apache.hadoop.fi.FiTestUtil.MarkerConstraint;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -70,9 +64,9 @@ public class TestFiDataTransferProtocol 
    * 4. open the same file
    * 5. read the 1 byte and compare results
    */
-  private static void write1byte(String methodName) throws IOException {
-    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
-        .numDataNodes(REPLICATION).format(true).build();
+  static void write1byte(String methodName) throws IOException {
+    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf
+        ).numDataNodes(REPLICATION).build();
     final FileSystem dfs = cluster.getFileSystem();
     try {
       final Path p = new Path("/" + methodName + "/foo");
@@ -305,184 +299,4 @@ public class TestFiDataTransferProtocol 
     final String methodName = FiTestUtil.getMethodName();
     runCallWritePacketToDisk(methodName, 2, new DoosAction(methodName, 2));
   }
-
-  private static void runPipelineCloseTest(String methodName,
-      Action<DatanodeID, IOException> a) throws IOException {
-    FiTestUtil.LOG.info("Running " + methodName + " ...");
-    final DataTransferTest t = (DataTransferTest) DataTransferTestUtil
-        .initTest();
-    t.fiPipelineClose.set(a);
-    write1byte(methodName);
-  }
-
-  private static void run41_43(String name, int i) throws IOException {
-    runPipelineCloseTest(name, new SleepAction(name, i, 3000));
-  }
-
-  private static void runPipelineCloseAck(String name, int i, DataNodeAction a
-      ) throws IOException {
-    FiTestUtil.LOG.info("Running " + name + " ...");
-    final DataTransferTest t = (DataTransferTest)DataTransferTestUtil.initTest();
-    final MarkerConstraint marker = new MarkerConstraint(name);
-    t.fiPipelineClose.set(new DatanodeMarkingAction(name, i, marker));
-    t.fiPipelineAck.set(new ConstraintSatisfactionAction<DatanodeID, IOException>(a, marker));
-    write1byte(name);
-  }
-
-  private static void run39_40(String name, int i) throws IOException {
-    runPipelineCloseAck(name, i, new SleepAction(name, i, 0));
-  }
-
-  /**
-   * Pipeline close:
-   * DN1 never responses after received close ack DN2.
-   * Client gets an IOException and determine DN1 bad.
-   */
-  @Test
-  public void pipeline_Fi_39() throws IOException {
-    run39_40(FiTestUtil.getMethodName(), 1);
-  }
-
-  /**
-   * Pipeline close:
-   * DN0 never responses after received close ack DN1.
-   * Client gets an IOException and determine DN0 bad.
-   */
-  @Test
-  public void pipeline_Fi_40() throws IOException {
-    run39_40(FiTestUtil.getMethodName(), 0);
-  }
-  
-  /**
-   * Pipeline close with DN0 very slow but it won't lead to timeout.
-   * Client finishes close successfully.
-   */
-  @Test
-  public void pipeline_Fi_41() throws IOException {
-    run41_43(FiTestUtil.getMethodName(), 0);
-  }
-
-  /**
-   * Pipeline close with DN1 very slow but it won't lead to timeout.
-   * Client finishes close successfully.
-   */
-  @Test
-  public void pipeline_Fi_42() throws IOException {
-    run41_43(FiTestUtil.getMethodName(), 1);
-  }
-
-  /**
-   * Pipeline close with DN2 very slow but it won't lead to timeout.
-   * Client finishes close successfully.
-   */
-  @Test
-  public void pipeline_Fi_43() throws IOException {
-    run41_43(FiTestUtil.getMethodName(), 2);
-  }
-
-  /**
-   * Pipeline close:
-   * DN0 throws an OutOfMemoryException
-   * right after it received a close request from client.
-   * Client gets an IOException and determine DN0 bad.
-   */
-  @Test
-  public void pipeline_Fi_44() throws IOException {
-    final String methodName = FiTestUtil.getMethodName();
-    runPipelineCloseTest(methodName, new OomAction(methodName, 0));
-  }
-
-  /**
-   * Pipeline close:
-   * DN1 throws an OutOfMemoryException
-   * right after it received a close request from client.
-   * Client gets an IOException and determine DN1 bad.
-   */
-  @Test
-  public void pipeline_Fi_45() throws IOException {
-    final String methodName = FiTestUtil.getMethodName();
-    runPipelineCloseTest(methodName, new OomAction(methodName, 1));
-  }
-
-  /**
-   * Pipeline close:
-   * DN2 throws an OutOfMemoryException
-   * right after it received a close request from client.
-   * Client gets an IOException and determine DN2 bad.
-   */
-  @Test
-  public void pipeline_Fi_46() throws IOException {
-    final String methodName = FiTestUtil.getMethodName();
-    runPipelineCloseTest(methodName, new OomAction(methodName, 2));
-  }
-
-  private static void run47_48(String name, int i) throws IOException {
-    runPipelineCloseAck(name, i, new OomAction(name, i));
-  }
-
-  /**
-   * Pipeline close:
-   * DN1 throws an OutOfMemoryException right after
-   * it received a close ack from DN2.
-   * Client gets an IOException and determine DN1 bad.
-   */
-  @Test
-  public void pipeline_Fi_47() throws IOException {
-    run47_48(FiTestUtil.getMethodName(), 1);
-  }
-
-  /**
-   * Pipeline close:
-   * DN0 throws an OutOfMemoryException right after
-   * it received a close ack from DN1.
-   * Client gets an IOException and determine DN0 bad.
-   */
-  @Test
-  public void pipeline_Fi_48() throws IOException {
-    run47_48(FiTestUtil.getMethodName(), 0);
-  }
-
-  private static void runBlockFileCloseTest(String methodName,
-      Action<DatanodeID, IOException> a) throws IOException {
-    FiTestUtil.LOG.info("Running " + methodName + " ...");
-    final DataTransferTest t = (DataTransferTest) DataTransferTestUtil
-        .initTest();
-    t.fiBlockFileClose.set(a);
-    write1byte(methodName);
-  }
-
-  private static void run49_51(String name, int i) throws IOException {
-    runBlockFileCloseTest(name, new IoeAction(name, i, "DISK ERROR"));
-  }
-
-  /**
-   * Pipeline close:
-   * DN0 throws a disk error exception when it is closing the block file.
-   * Client gets an IOException and determine DN0 bad.
-   */
-  @Test
-  public void pipeline_Fi_49() throws IOException {
-    run49_51(FiTestUtil.getMethodName(), 0);
-  }
-
-
-  /**
-   * Pipeline close:
-   * DN1 throws a disk error exception when it is closing the block file.
-   * Client gets an IOException and determine DN1 bad.
-   */
-  @Test
-  public void pipeline_Fi_50() throws IOException {
-    run49_51(FiTestUtil.getMethodName(), 1);
-  }
-
-  /**
-   * Pipeline close:
-   * DN2 throws a disk error exception when it is closing the block file.
-   * Client gets an IOException and determine DN2 bad.
-   */
-  @Test
-  public void pipeline_Fi_51() throws IOException {
-    run49_51(FiTestUtil.getMethodName(), 2);
-  }
-}
\ No newline at end of file
+}

Modified: hadoop/hdfs/branches/yahoo-merge/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiPipelineClose.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/yahoo-merge/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiPipelineClose.java?rev=1125608&r1=1125607&r2=1125608&view=diff
==============================================================================
--- hadoop/hdfs/branches/yahoo-merge/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiPipelineClose.java (original)
+++ hadoop/hdfs/branches/yahoo-merge/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiPipelineClose.java Sat May 21 01:10:40 2011
@@ -19,76 +19,29 @@ package org.apache.hadoop.hdfs.server.da
 
 import java.io.IOException;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fi.DataTransferTestUtil;
-import org.apache.hadoop.fi.FiTestUtil;
+import org.apache.hadoop.fi.DataTransferTestUtil.DataNodeAction;
 import org.apache.hadoop.fi.DataTransferTestUtil.DataTransferTest;
+import org.apache.hadoop.fi.DataTransferTestUtil.DatanodeMarkingAction;
+import org.apache.hadoop.fi.DataTransferTestUtil.IoeAction;
+import org.apache.hadoop.fi.DataTransferTestUtil.OomAction;
 import org.apache.hadoop.fi.DataTransferTestUtil.SleepAction;
+import org.apache.hadoop.fi.FiTestUtil;
 import org.apache.hadoop.fi.FiTestUtil.Action;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.fi.FiTestUtil.ConstraintSatisfactionAction;
+import org.apache.hadoop.fi.FiTestUtil.MarkerConstraint;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
-import org.junit.Assert;
 import org.junit.Test;
 
 /** Test DataTransferProtocol with fault injection. */
 public class TestFiPipelineClose {
-  static final short REPLICATION = 3;
-  static final long BLOCKSIZE = 1L * (1L << 20);
-
-  static final Configuration conf = new HdfsConfiguration();
-  static {
-    conf.setInt("dfs.datanode.handler.count", 1);
-    conf.setInt("dfs.replication", REPLICATION);
-    conf.setInt(DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY, 5000);
-  }
-
-  static private FSDataOutputStream createFile(FileSystem fs, Path p
-      ) throws IOException {
-    return fs.create(p, true, fs.getConf().getInt("io.file.buffer.size", 4096),
-        REPLICATION, BLOCKSIZE);
-  }
-
-  /**
-   * 1. create files with dfs
-   * 2. write 1 byte
-   * 3. close file
-   * 4. open the same file
-   * 5. read the 1 byte and compare results
-   */
-  private static void write1byte(String methodName) throws IOException {
-    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
-        .numDataNodes(REPLICATION).format(true).build();
-    final FileSystem dfs = cluster.getFileSystem();
-    try {
-      final Path p = new Path("/" + methodName + "/foo");
-      final FSDataOutputStream out = createFile(dfs, p);
-      out.write(1);
-      out.close();
-      
-      final FSDataInputStream in = dfs.open(p);
-      final int b = in.read();
-      in.close();
-      Assert.assertEquals(1, b);
-    }
-    finally {
-      dfs.close();
-      cluster.shutdown();
-    }
-  }
-
-   private static void runPipelineCloseTest(String methodName,
+  private static void runPipelineCloseTest(String methodName,
       Action<DatanodeID, IOException> a) throws IOException {
     FiTestUtil.LOG.info("Running " + methodName + " ...");
     final DataTransferTest t = (DataTransferTest) DataTransferTestUtil
         .initTest();
     t.fiPipelineClose.set(a);
-    write1byte(methodName);
+    TestFiDataTransferProtocol.write1byte(methodName);
   }
 
   /**
@@ -123,4 +76,175 @@ public class TestFiPipelineClose {
     final String methodName = FiTestUtil.getMethodName();
     runPipelineCloseTest(methodName, new SleepAction(methodName, 2, 0));
   }
+
+  private static void run41_43(String name, int i) throws IOException {
+    runPipelineCloseTest(name, new SleepAction(name, i, 3000));
+  }
+
+  private static void runPipelineCloseAck(String name, int i, DataNodeAction a
+      ) throws IOException {
+    FiTestUtil.LOG.info("Running " + name + " ...");
+    final DataTransferTest t = (DataTransferTest)DataTransferTestUtil.initTest();
+    final MarkerConstraint marker = new MarkerConstraint(name);
+    t.fiPipelineClose.set(new DatanodeMarkingAction(name, i, marker));
+    t.fiPipelineAck.set(new ConstraintSatisfactionAction<DatanodeID, IOException>(a, marker));
+    TestFiDataTransferProtocol.write1byte(name);
+  }
+
+  private static void run39_40(String name, int i) throws IOException {
+    runPipelineCloseAck(name, i, new SleepAction(name, i, 0));
+  }
+
+  /**
+   * Pipeline close:
+   * DN1 never responses after received close ack DN2.
+   * Client gets an IOException and determine DN1 bad.
+   */
+  @Test
+  public void pipeline_Fi_39() throws IOException {
+    run39_40(FiTestUtil.getMethodName(), 1);
+  }
+
+  /**
+   * Pipeline close:
+   * DN0 never responses after received close ack DN1.
+   * Client gets an IOException and determine DN0 bad.
+   */
+  @Test
+  public void pipeline_Fi_40() throws IOException {
+    run39_40(FiTestUtil.getMethodName(), 0);
+  }
+  
+  /**
+   * Pipeline close with DN0 very slow but it won't lead to timeout.
+   * Client finishes close successfully.
+   */
+  @Test
+  public void pipeline_Fi_41() throws IOException {
+    run41_43(FiTestUtil.getMethodName(), 0);
+  }
+
+  /**
+   * Pipeline close with DN1 very slow but it won't lead to timeout.
+   * Client finishes close successfully.
+   */
+  @Test
+  public void pipeline_Fi_42() throws IOException {
+    run41_43(FiTestUtil.getMethodName(), 1);
+  }
+
+  /**
+   * Pipeline close with DN2 very slow but it won't lead to timeout.
+   * Client finishes close successfully.
+   */
+  @Test
+  public void pipeline_Fi_43() throws IOException {
+    run41_43(FiTestUtil.getMethodName(), 2);
+  }
+
+  /**
+   * Pipeline close:
+   * DN0 throws an OutOfMemoryException
+   * right after it received a close request from client.
+   * Client gets an IOException and determine DN0 bad.
+   */
+  @Test
+  public void pipeline_Fi_44() throws IOException {
+    final String methodName = FiTestUtil.getMethodName();
+    runPipelineCloseTest(methodName, new OomAction(methodName, 0));
+  }
+
+  /**
+   * Pipeline close:
+   * DN1 throws an OutOfMemoryException
+   * right after it received a close request from client.
+   * Client gets an IOException and determine DN1 bad.
+   */
+  @Test
+  public void pipeline_Fi_45() throws IOException {
+    final String methodName = FiTestUtil.getMethodName();
+    runPipelineCloseTest(methodName, new OomAction(methodName, 1));
+  }
+
+  /**
+   * Pipeline close:
+   * DN2 throws an OutOfMemoryException
+   * right after it received a close request from client.
+   * Client gets an IOException and determine DN2 bad.
+   */
+  @Test
+  public void pipeline_Fi_46() throws IOException {
+    final String methodName = FiTestUtil.getMethodName();
+    runPipelineCloseTest(methodName, new OomAction(methodName, 2));
+  }
+
+  private static void run47_48(String name, int i) throws IOException {
+    runPipelineCloseAck(name, i, new OomAction(name, i));
+  }
+
+  /**
+   * Pipeline close:
+   * DN1 throws an OutOfMemoryException right after
+   * it received a close ack from DN2.
+   * Client gets an IOException and determine DN1 bad.
+   */
+  @Test
+  public void pipeline_Fi_47() throws IOException {
+    run47_48(FiTestUtil.getMethodName(), 1);
+  }
+
+  /**
+   * Pipeline close:
+   * DN0 throws an OutOfMemoryException right after
+   * it received a close ack from DN1.
+   * Client gets an IOException and determine DN0 bad.
+   */
+  @Test
+  public void pipeline_Fi_48() throws IOException {
+    run47_48(FiTestUtil.getMethodName(), 0);
+  }
+
+  private static void runBlockFileCloseTest(String methodName,
+      Action<DatanodeID, IOException> a) throws IOException {
+    FiTestUtil.LOG.info("Running " + methodName + " ...");
+    final DataTransferTest t = (DataTransferTest) DataTransferTestUtil
+        .initTest();
+    t.fiBlockFileClose.set(a);
+    TestFiDataTransferProtocol.write1byte(methodName);
+  }
+
+  private static void run49_51(String name, int i) throws IOException {
+    runBlockFileCloseTest(name, new IoeAction(name, i, "DISK ERROR"));
+  }
+
+  /**
+   * Pipeline close:
+   * DN0 throws a disk error exception when it is closing the block file.
+   * Client gets an IOException and determine DN0 bad.
+   */
+  @Test
+  public void pipeline_Fi_49() throws IOException {
+    run49_51(FiTestUtil.getMethodName(), 0);
+  }
+
+
+  /**
+   * Pipeline close:
+   * DN1 throws a disk error exception when it is closing the block file.
+   * Client gets an IOException and determine DN1 bad.
+   */
+  @Test
+  public void pipeline_Fi_50() throws IOException {
+    run49_51(FiTestUtil.getMethodName(), 1);
+  }
+
+  /**
+   * Pipeline close:
+   * DN2 throws a disk error exception when it is closing the block file.
+   * Client gets an IOException and determine DN2 bad.
+   */
+  @Test
+  public void pipeline_Fi_51() throws IOException {
+    run49_51(FiTestUtil.getMethodName(), 2);
+  }
 }

Propchange: hadoop/hdfs/branches/yahoo-merge/src/test/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat May 21 01:10:40 2011
@@ -3,4 +3,4 @@
 /hadoop/hdfs/branches/HDFS-1052/src/test/hdfs:1078924,1078943,1080331,1080391,1080402,1081603,1082326,1084245,1086788,1090419
 /hadoop/hdfs/branches/HDFS-265/src/test/hdfs:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/test/hdfs:820487
-/hadoop/hdfs/trunk/src/test/hdfs:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576
+/hadoop/hdfs/trunk/src/test/hdfs:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1090357,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576

Propchange: hadoop/hdfs/branches/yahoo-merge/src/webapps/datanode/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat May 21 01:10:40 2011
@@ -3,4 +3,4 @@
 /hadoop/hdfs/branches/HDFS-1052/src/webapps/datanode:1078924,1078943,1080331,1080391,1080402,1081603,1082326,1084245,1086788,1090419
 /hadoop/hdfs/branches/HDFS-265/src/webapps/datanode:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/webapps/datanode:820487
-/hadoop/hdfs/trunk/src/webapps/datanode:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576
+/hadoop/hdfs/trunk/src/webapps/datanode:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1090357,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576

Propchange: hadoop/hdfs/branches/yahoo-merge/src/webapps/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat May 21 01:10:40 2011
@@ -3,4 +3,4 @@
 /hadoop/hdfs/branches/HDFS-1052/src/webapps/hdfs:1078924,1078943,1080331,1080391,1080402,1081603,1082326,1084245,1086788,1090419
 /hadoop/hdfs/branches/HDFS-265/src/webapps/hdfs:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/webapps/hdfs:820487
-/hadoop/hdfs/trunk/src/webapps/hdfs:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576
+/hadoop/hdfs/trunk/src/webapps/hdfs:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1090357,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576

Propchange: hadoop/hdfs/branches/yahoo-merge/src/webapps/secondary/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat May 21 01:10:40 2011
@@ -3,4 +3,4 @@
 /hadoop/hdfs/branches/HDFS-1052/src/webapps/secondary:1078924,1078943,1080331,1080391,1080402,1081603,1082326,1084245,1086788,1090419
 /hadoop/hdfs/branches/HDFS-265/src/webapps/secondary:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/webapps/secondary:820487
-/hadoop/hdfs/trunk/src/webapps/secondary:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576
+/hadoop/hdfs/trunk/src/webapps/secondary:987665-1004788,1026178-1028906,1032470-1033639,1034073,1034082-1034181,1034501-1034544,1035508,1039957,1040005,1052823,1060619,1061067,1062020,1062045,1062052,1071518,1074282,1080095,1080380,1080836,1081580,1082263,1083951,1085509,1086654,1087080,1087115,1087437,1090357,1091619,1092584,1095245,1095789,1096846,1097648,1097969,1098867,1099640,1101324,1101753,1104395,1104407,1124576


