hadoop-hdfs-commits mailing list archives

From: hair...@apache.org
Subject: svn commit: r893067 - in /hadoop/hdfs/branches/branch-0.21: ./ .eclipse.templates/.launches/ src/contrib/ src/contrib/hdfsproxy/ src/java/ src/java/org/apache/hadoop/hdfs/protocol/ src/java/org/apache/hadoop/hdfs/server/datanode/ src/test/aop/org/apach...
Date: Tue, 22 Dec 2009 04:44:13 GMT
Author: hairong
Date: Tue Dec 22 04:44:12 2009
New Revision: 893067

URL: http://svn.apache.org/viewvc?rev=893067&view=rev
Log:
Merge -c893066 to bring the change of HDFS-564 from trunk to branch 0.21

Added:
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol2.java
      - copied unchanged from r893066, hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol2.java
Modified:
    hadoop/hdfs/branches/branch-0.21/   (props changed)
    hadoop/hdfs/branches/branch-0.21/.eclipse.templates/.launches/   (props changed)
    hadoop/hdfs/branches/branch-0.21/CHANGES.txt   (contents, props changed)
    hadoop/hdfs/branches/branch-0.21/build.xml   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/contrib/build.xml   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/contrib/hdfsproxy/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/java/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/protocol/RecoveryInProgressException.java   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/FiHFlushTestUtil.java
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/FiTestUtil.java
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/protocol/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiPipelineClose.java
    hadoop/hdfs/branches/branch-0.21/src/test/hdfs/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/webapps/datanode/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/webapps/hdfs/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/webapps/secondary/   (props changed)

Propchange: hadoop/hdfs/branches/branch-0.21/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/hdfs:713112
 /hadoop/hdfs/branches/HDFS-265:796829-820463
-/hadoop/hdfs/trunk:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066

Propchange: hadoop/hdfs/branches/branch-0.21/.eclipse.templates/.launches/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -1 +1 @@
-/hadoop/hdfs/trunk/.eclipse.templates/.launches:817853-817863,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk/.eclipse.templates/.launches:817853-817863,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066

Modified: hadoop/hdfs/branches/branch-0.21/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/CHANGES.txt?rev=893067&r1=893066&r2=893067&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/CHANGES.txt (original)
+++ hadoop/hdfs/branches/branch-0.21/CHANGES.txt Tue Dec 22 04:44:12 2009
@@ -500,6 +500,8 @@
     HDFS-724. Pipeline hangs if one of the block receiver is not responsive.
     (hairong)
 
+    HDFS-564. Adding pipeline tests 17-35. (hairong)
+
 Release 0.20.2 - Unreleased
 
   IMPROVEMENTS

Propchange: hadoop/hdfs/branches/branch-0.21/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/hdfs/CHANGES.txt:713112
 /hadoop/hdfs/branches/HDFS-265/CHANGES.txt:796829-820463
-/hadoop/hdfs/trunk/CHANGES.txt:817853-817863,818294-818298,818801,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk/CHANGES.txt:817853-817863,818294-818298,818801,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066

Propchange: hadoop/hdfs/branches/branch-0.21/build.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/build.xml:713112
 /hadoop/core/trunk/build.xml:779102
 /hadoop/hdfs/branches/HDFS-265/build.xml:796829-820463
-/hadoop/hdfs/trunk/build.xml:817853-817863,818294-818298,818801,824552,824944,825229,826149,828116,828926,829258,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk/build.xml:817853-817863,818294-818298,818801,824552,824944,825229,826149,828116,828926,829258,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066

Propchange: hadoop/hdfs/branches/branch-0.21/src/contrib/build.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/contrib/build.xml:713112
 /hadoop/hdfs/branches/HDFS-265/src/contrib/build.xml:796829-820463
-/hadoop/hdfs/trunk/src/contrib/build.xml:817853-817863,818294-818298,818801,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk/src/contrib/build.xml:817853-817863,818294-818298,818801,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066

Propchange: hadoop/hdfs/branches/branch-0.21/src/contrib/hdfsproxy/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/contrib/hdfsproxy:713112
 /hadoop/core/trunk/src/contrib/hdfsproxy:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/contrib/hdfsproxy:796829-820463
-/hadoop/hdfs/trunk/src/contrib/hdfsproxy:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk/src/contrib/hdfsproxy:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066

Propchange: hadoop/hdfs/branches/branch-0.21/src/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/java:713112
 /hadoop/core/trunk/src/hdfs:776175-785643,785929-786278
 /hadoop/hdfs/branches/HDFS-265/src/java:796829-820463
-/hadoop/hdfs/trunk/src/java:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk/src/java:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066

Propchange: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/protocol/RecoveryInProgressException.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -1,5 +1,5 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/java/org/apache/hadoop/hdfs/server/datanode/RecoveryInProgressException.java:713112
 /hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/RecoveryInProgressException.java:776175-785643,785929-786278
 /hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/RecoveryInProgressException.java:817353-818319,818321-818553
-/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/RecoveryInProgressException.java:817853-817863,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/RecoveryInProgressException.java:817853-817863,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066
 /hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/RecoveryInProgressException.java:796829-800617,800619-803337,804756-805652,808672-809439,811495-813103,813105-813630,814223-815964,818294-818298

Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java?rev=893067&r1=893066&r2=893067&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java Tue Dec 22 04:44:12 2009
@@ -432,6 +432,14 @@
     return receivePacket(offsetInBlock, seqno, lastPacketInBlock, len, endOfHeader);
   }
 
+  /**
+   * Write the received packet to disk (data only)
+   */
+  private void writePacketToDisk(byte[] pktBuf, int startByteToDisk, 
+      int numBytesToDisk) throws IOException {
+    out.write(pktBuf, startByteToDisk, numBytesToDisk);
+  }
+  
   /** 
    * Receives and processes a packet. It can contain many chunks.
    * returns the number of data bytes that the packet has.
@@ -524,7 +532,7 @@
 
           int startByteToDisk = dataOff+(int)(onDiskLen-firstByteInBlock);
           int numBytesToDisk = (int)(offsetInBlock-onDiskLen);
-          out.write(pktBuf, startByteToDisk, numBytesToDisk);
+          writePacketToDisk(pktBuf, startByteToDisk, numBytesToDisk);
 
           // If this is a partial chunk, then verify that this is the only
           // chunk in the packet. Calculate new crc for this chunk.

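The BlockReceiver change above extracts the raw disk write into a dedicated
writePacketToDisk method. Its purpose, confirmed by the BlockReceiverAspects
change further below, is to give the fault-injection aspects a stable, named
join point instead of matching a generic OutputStream.write call. A minimal
sketch of the pattern, using a hypothetical PacketSink class in place of
BlockReceiver:

    import java.io.IOException;
    import java.io.OutputStream;

    // Hypothetical stand-in for BlockReceiver: the disk write gets its own
    // named method so an AspectJ pointcut such as
    //   call(* writePacketToDisk(..))
    // can intercept exactly this operation.
    class PacketSink {
      private final OutputStream out;

      PacketSink(OutputStream out) {
        this.out = out;
      }

      // Write the received packet to disk (data only), mirroring the new
      // BlockReceiver.writePacketToDisk shown above.
      void writePacketToDisk(byte[] pktBuf, int startByteToDisk,
          int numBytesToDisk) throws IOException {
        out.write(pktBuf, startByteToDisk, numBytesToDisk);
      }
    }
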
Propchange: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -3,4 +3,4 @@
 /hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeBlockInfo.java:776175-785643,785929-786278
 /hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:776175-785643,785929-786278
 /hadoop/hdfs/branches/HDFS-265/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:796829-820463
-/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066

Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java?rev=893067&r1=893066&r2=893067&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java Tue Dec 22 04:44:12 2009
@@ -59,30 +59,36 @@
     private volatile boolean isSuccess = false;
 
     /** Simulate action for the receiverOpWriteBlock pointcut */
-    public final ActionContainer<DatanodeID> fiReceiverOpWriteBlock
-        = new ActionContainer<DatanodeID>();
+    public final ActionContainer<DatanodeID, IOException> fiReceiverOpWriteBlock
+        = new ActionContainer<DatanodeID, IOException>();
     /** Simulate action for the callReceivePacket pointcut */
-    public final ActionContainer<DatanodeID> fiCallReceivePacket
-        = new ActionContainer<DatanodeID>();
+    public final ActionContainer<DatanodeID, IOException> fiCallReceivePacket
+        = new ActionContainer<DatanodeID, IOException>();
+    /** Simulate action for the callWritePacketToDisk pointcut */
+    public final ActionContainer<DatanodeID, IOException> fiCallWritePacketToDisk
+        = new ActionContainer<DatanodeID, IOException>();
     /** Simulate action for the statusRead pointcut */
-    public final ActionContainer<DatanodeID> fiStatusRead
-        = new ActionContainer<DatanodeID>();
+    public final ActionContainer<DatanodeID, IOException> fiStatusRead
+        = new ActionContainer<DatanodeID, IOException>();
+    /** Simulate action for the afterDownstreamStatusRead pointcut */
+    public final ActionContainer<DatanodeID, IOException> fiAfterDownstreamStatusRead
+        = new ActionContainer<DatanodeID, IOException>();
     /** Simulate action for the pipelineAck pointcut */
-    public final ActionContainer<DatanodeID> fiPipelineAck
-        = new ActionContainer<DatanodeID>();
+    public final ActionContainer<DatanodeID, IOException> fiPipelineAck
+        = new ActionContainer<DatanodeID, IOException>();
     /** Simulate action for the pipelineClose pointcut */
-    public final ActionContainer<DatanodeID> fiPipelineClose
-        = new ActionContainer<DatanodeID>();
+    public final ActionContainer<DatanodeID, IOException> fiPipelineClose
+        = new ActionContainer<DatanodeID, IOException>();
     /** Simulate action for the blockFileClose pointcut */
-    public final ActionContainer<DatanodeID> fiBlockFileClose
-        = new ActionContainer<DatanodeID>();
+    public final ActionContainer<DatanodeID, IOException> fiBlockFileClose
+        = new ActionContainer<DatanodeID, IOException>();
 
     /** Verification action for the pipelineInitNonAppend pointcut */
-    public final ActionContainer<Integer> fiPipelineInitErrorNonAppend
-        = new ActionContainer<Integer>();
+    public final ActionContainer<Integer, RuntimeException> fiPipelineInitErrorNonAppend
+        = new ActionContainer<Integer, RuntimeException>();
     /** Verification action for the pipelineErrorAfterInit pointcut */
-    public final ActionContainer<Integer> fiPipelineErrorAfterInit
-        = new ActionContainer<Integer>();
+    public final ActionContainer<Integer, RuntimeException> fiPipelineErrorAfterInit
+        = new ActionContainer<Integer, RuntimeException>();
 
     /** Get test status */
     public boolean isSuccess() {
@@ -121,7 +127,8 @@
   }
 
   /** Action for DataNode */
-  public static abstract class DataNodeAction implements Action<DatanodeID> {
+  public static abstract class DataNodeAction implements
+      Action<DatanodeID, IOException> {
     /** The name of the test */
     final String currentTest;
     /** The index of the datanode */
@@ -195,6 +202,28 @@
     }
   }
 
+  /** Throws OutOfMemoryError if the count is zero. */
+  public static class CountdownOomAction extends OomAction {
+    private final CountdownConstraint countdown;
+
+    /** Create an action for datanode i in the pipeline with count down. */
+    public CountdownOomAction(String currentTest, int i, int count) {
+      super(currentTest, i);
+      countdown = new CountdownConstraint(count);
+    }
+
+    @Override
+    public void run(DatanodeID id) {
+      final DataTransferTest test = getDataTransferTest();
+      final Pipeline p = test.getPipeline(id);
+      if (p.contains(index, id) && countdown.isSatisfied()) {
+        final String s = toString(id);
+        FiTestUtil.LOG.info(s);
+        throw new OutOfMemoryError(s);
+      }
+    }
+  }
+
   /** Throws DiskOutOfSpaceException. */
   public static class DoosAction extends DataNodeAction {
     /** Create an action for datanode i in the pipeline. */
@@ -242,6 +271,28 @@
     }
   }
 
+  /** Throws DiskOutOfSpaceException if the count is zero. */
+  public static class CountdownDoosAction extends DoosAction {
+    private final CountdownConstraint countdown;
+
+    /** Create an action for datanode i in the pipeline with count down. */
+    public CountdownDoosAction(String currentTest, int i, int count) {
+      super(currentTest, i);
+      countdown = new CountdownConstraint(count);
+    }
+
+    @Override
+    public void run(DatanodeID id) throws DiskOutOfSpaceException {
+      final DataTransferTest test = getDataTransferTest();
+      final Pipeline p = test.getPipeline(id);
+      if (p.contains(index, id) && countdown.isSatisfied()) {
+        final String s = toString(id);
+        FiTestUtil.LOG.info(s);
+        throw new DiskOutOfSpaceException(s);
+      }
+    }
+  }
+
   /**
    * Sleep some period of time so that it slows down the datanode
    * or sleep forever so that datanode becomes not responding.
@@ -307,8 +358,50 @@
     }
   }
 
+  /**
+   * When the count is zero,
+   * sleep some period of time so that it slows down the datanode
+   * or sleep forever so that datanode becomes not responding.
+   */
+  public static class CountdownSleepAction extends SleepAction {
+    private final CountdownConstraint countdown;
+
+    /**
+     * Create an action for datanode i in the pipeline.
+     * @param duration In milliseconds, duration <= 0 means sleeping forever.
+     */
+    public CountdownSleepAction(String currentTest, int i,
+        long duration, int count) {
+      this(currentTest, i, duration, duration+1, count);
+    }
+
+    /** Create an action for datanode i in the pipeline with count down. */
+    public CountdownSleepAction(String currentTest, int i,
+        long minDuration, long maxDuration, int count) {
+      super(currentTest, i, minDuration, maxDuration);
+      countdown = new CountdownConstraint(count);
+    }
+
+    @Override
+    public void run(DatanodeID id) {
+      final DataTransferTest test = getDataTransferTest();
+      final Pipeline p = test.getPipeline(id);
+      if (p.contains(index, id) && countdown.isSatisfied()) {
+        final String s = toString(id) + ", duration = ["
+        + minDuration + "," + maxDuration + ")";
+        FiTestUtil.LOG.info(s);
+        if (maxDuration <= 1) {
+          for(; true; FiTestUtil.sleep(1000)); //sleep forever
+        } else {
+          FiTestUtil.sleep(minDuration, maxDuration);
+        }
+      }
+    }
+  }
+
   /** Action for pipeline error verification */
-  public static class VerificationAction implements Action<Integer> {
+  public static class VerificationAction implements
+      Action<Integer, RuntimeException> {
     /** The name of the test */
     final String currentTest;
     /** The error index of the datanode */
@@ -343,9 +436,10 @@
    *  Create a OomAction with a CountdownConstraint
    *  so that it throws OutOfMemoryError if the count is zero.
    */
-  public static ConstraintSatisfactionAction<DatanodeID> createCountdownOomAction(
-      String currentTest, int i, int count) {
-    return new ConstraintSatisfactionAction<DatanodeID>(
+  public static ConstraintSatisfactionAction<DatanodeID, IOException>
+      createCountdownOomAction(
+        String currentTest, int i, int count) {
+    return new ConstraintSatisfactionAction<DatanodeID, IOException>(
         new OomAction(currentTest, i), new CountdownConstraint(count));
   }
 
@@ -353,9 +447,10 @@
    *  Create a DoosAction with a CountdownConstraint
    *  so that it throws DiskOutOfSpaceException if the count is zero.
    */
-  public static ConstraintSatisfactionAction<DatanodeID> createCountdownDoosAction(
+  public static ConstraintSatisfactionAction<DatanodeID, IOException>
+    createCountdownDoosAction(
       String currentTest, int i, int count) {
-    return new ConstraintSatisfactionAction<DatanodeID>(
+    return new ConstraintSatisfactionAction<DatanodeID, IOException>(
         new DoosAction(currentTest, i), new CountdownConstraint(count));
   }
 
@@ -366,9 +461,9 @@
    * sleep some period of time so that it slows down the datanode
    * or sleep forever so the that datanode becomes not responding.
    */
-  public static ConstraintSatisfactionAction<DatanodeID> createCountdownSleepAction(
+  public static ConstraintSatisfactionAction<DatanodeID, IOException> createCountdownSleepAction(
       String currentTest, int i, long minDuration, long maxDuration, int count) {
-    return new ConstraintSatisfactionAction<DatanodeID>(
+    return new ConstraintSatisfactionAction<DatanodeID, IOException>(
         new SleepAction(currentTest, i, minDuration, maxDuration),
         new CountdownConstraint(count));
   }
@@ -377,7 +472,7 @@
    * Same as
    * createCountdownSleepAction(currentTest, i, duration, duration+1, count).
    */
-  public static ConstraintSatisfactionAction<DatanodeID> createCountdownSleepAction(
+  public static ConstraintSatisfactionAction<DatanodeID, IOException> createCountdownSleepAction(
       String currentTest, int i, long duration, int count) {
     return createCountdownSleepAction(currentTest, i, duration, duration+1,
         count);

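Two things run through the DataTransferTestUtil diff above: every
ActionContainer now carries the checked exception type its actions may throw,
and the new Countdown*Action classes fire a fault only once a per-action
counter has run down, so an error can be injected on the Nth packet rather
than the first. A self-contained sketch of the countdown gating, assuming a
simplified counter in place of the real CountdownConstraint:

    // Illustrative countdown gate, assuming semantics like CountdownConstraint:
    // not satisfied for the first `count` calls, satisfied afterwards.
    class CountdownGate {
      private int count;

      CountdownGate(int count) {
        this.count = count;
      }

      boolean isSatisfied() {
        if (count > 0) {
          count--;
          return false;
        }
        return true;
      }
    }

    // Usage sketch: inject an OutOfMemoryError only on the third call,
    // analogous to what CountdownOomAction does for a chosen datanode.
    class CountdownOomDemo {
      public static void main(String[] args) {
        CountdownGate gate = new CountdownGate(2);
        for (int i = 1; i <= 3; i++) {
          if (gate.isSatisfied()) {
            throw new OutOfMemoryError("FI: injected at call " + i);
          }
        }
      }
    }
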
Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/FiHFlushTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/FiHFlushTestUtil.java?rev=893067&r1=893066&r2=893067&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/FiHFlushTestUtil.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/FiHFlushTestUtil.java Tue Dec 22 04:44:12 2009
@@ -59,9 +59,9 @@
   
   /** Class adds new type of action */
   public static class HFlushTest extends DataTransferTest {
-    public final ActionContainer<DatanodeID> fiCallHFlush = 
-      new ActionContainer<DatanodeID>();
-    public final ActionContainer<Integer> fiErrorOnCallHFlush = 
-      new ActionContainer<Integer>();
+    public final ActionContainer<DatanodeID, IOException> fiCallHFlush = 
+      new ActionContainer<DatanodeID, IOException>();
+    public final ActionContainer<Integer, RuntimeException> fiErrorOnCallHFlush = 
+      new ActionContainer<Integer, RuntimeException>();
   }
 }
\ No newline at end of file

Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/FiTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/FiTestUtil.java?rev=893067&r1=893066&r2=893067&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/FiTestUtil.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/fi/FiTestUtil.java Tue Dec 22 04:44:12 2009
@@ -17,7 +17,8 @@
  */
 package org.apache.hadoop.fi;
 
-import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Random;
 
 import org.apache.commons.logging.Log;
@@ -95,24 +96,23 @@
   }
 
   /** Action interface */
-  public static interface Action<T> {
+  public static interface Action<T, E extends Exception> {
     /** Run the action with the parameter. */
-    public void run(T parameter) throws IOException;
+    public void run(T parameter) throws E;
   }
 
   /** An ActionContainer contains at most one action. */
-  public static class ActionContainer<T> {
-    private Action<T> action;
-
+  public static class ActionContainer<T, E extends Exception> {
+    private List<Action<T, E>> actionList = new ArrayList<Action<T, E>>();
     /** Create an empty container. */
     public ActionContainer() {}
 
     /** Set action. */
-    public void set(Action<T> a) {action = a;}
+    public void set(Action<T, E> a) {actionList.add(a);}
 
     /** Run the action if it exists. */
-    public void run(T obj) throws IOException {
-      if (action != null) {
+    public void run(T obj) throws E {
+      for (Action<T, E> action : actionList) {
         action.run(obj);
       }
     }
@@ -147,13 +147,14 @@
   }
   
   /** An action is fired if all the constraints are satisfied. */
-  public static class ConstraintSatisfactionAction<T> implements Action<T> {
-    private final Action<T> action;
+  public static class ConstraintSatisfactionAction<T, E extends Exception> 
+      implements Action<T, E> {
+    private final Action<T, E> action;
     private final Constraint[] constraints;
     
     /** Constructor */
     public ConstraintSatisfactionAction(
-        Action<T> action, Constraint... constraints) {
+        Action<T, E> action, Constraint... constraints) {
       this.action = action;
       this.constraints = constraints;
     }
@@ -163,7 +164,7 @@
      * Short-circuit-and is used. 
      */
     @Override
-    public final void run(T parameter) throws IOException {
+    public final void run(T parameter) throws E {
       for(Constraint c : constraints) {
         if (!c.isSatisfied()) {
           return;

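The FiTestUtil change is the root of the generics churn in the files above:
Action gains an exception type parameter, so each container can declare the
exact checked exception its actions may throw (IOException for datanode hooks,
RuntimeException for client-side verification), and ActionContainer now
accumulates a list of actions rather than holding at most one; note that
set() appends despite its name. A condensed, standalone version of the
resulting types:

    import java.util.ArrayList;
    import java.util.List;

    // Condensed sketch of the generified fault-injection types after this
    // change; the real classes live in org.apache.hadoop.fi.FiTestUtil.
    interface Action<T, E extends Exception> {
      void run(T parameter) throws E;
    }

    class ActionContainer<T, E extends Exception> {
      // Every registered action; set() adds rather than replaces.
      private final List<Action<T, E>> actionList =
          new ArrayList<Action<T, E>>();

      void set(Action<T, E> a) {
        actionList.add(a);
      }

      // Run all registered actions, propagating the declared exception type.
      void run(T obj) throws E {
        for (Action<T, E> action : actionList) {
          action.run(obj);
        }
      }
    }
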
Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj?rev=893067&r1=893066&r2=893067&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj Tue Dec 22 04:44:12 2009
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.hdfs;
 
-import java.io.IOException;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fi.DataTransferTestUtil;
@@ -49,14 +47,10 @@
   after(DataStreamer datastreamer) returning : pipelineInitNonAppend(datastreamer) {
     LOG.info("FI: after pipelineInitNonAppend: hasError="
         + datastreamer.hasError + " errorIndex=" + datastreamer.errorIndex);
-    try {
-      if (datastreamer.hasError) {
-        DataTransferTest dtTest = DataTransferTestUtil.getDataTransferTest();
-        if (dtTest != null )
-          dtTest.fiPipelineInitErrorNonAppend.run(datastreamer.errorIndex);
-      }
-    } catch (IOException e) {
-      throw new RuntimeException(e);
+    if (datastreamer.hasError) {
+      DataTransferTest dtTest = DataTransferTestUtil.getDataTransferTest();
+      if (dtTest != null)
+        dtTest.fiPipelineInitErrorNonAppend.run(datastreamer.errorIndex);
     }
   }
 
@@ -78,13 +72,9 @@
   before(DataStreamer datastreamer) : pipelineErrorAfterInit(datastreamer) {
     LOG.info("FI: before pipelineErrorAfterInit: errorIndex="
         + datastreamer.errorIndex);
-    try {
-      DataTransferTest dtTest = DataTransferTestUtil.getDataTransferTest();
-      if (dtTest != null )
-        dtTest.fiPipelineErrorAfterInit.run(datastreamer.errorIndex);
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
+    DataTransferTest dtTest = DataTransferTestUtil.getDataTransferTest();
+    if (dtTest != null )
+      dtTest.fiPipelineErrorAfterInit.run(datastreamer.errorIndex);
   }
 
   pointcut pipelineClose(DFSOutputStream out):

Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java?rev=893067&r1=893066&r2=893067&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java Tue Dec 22 04:44:12 2009
@@ -39,7 +39,7 @@
   /**
    * Storing acknowleged bytes num. action for fault injection tests
    */
-  public static class ReceivedCheckAction implements FiTestUtil.Action<NodeBytes> {
+  public static class ReceivedCheckAction implements FiTestUtil.Action<NodeBytes, IOException> {
     String name;
     LinkedList<NodeBytes> rcv = ((PipelinesTest) getPipelineTest()).received;
     LinkedList<NodeBytes> ack = ((PipelinesTest) getPipelineTest()).acked;
@@ -77,7 +77,7 @@
   /**
    * Storing acknowleged bytes num. action for fault injection tests
    */
-  public static class AckedCheckAction implements FiTestUtil.Action<NodeBytes> {
+  public static class AckedCheckAction implements FiTestUtil.Action<NodeBytes, IOException> {
     String name;
     LinkedList<NodeBytes> rcv = ((PipelinesTest) getPipelineTest()).received;
     LinkedList<NodeBytes> ack = ((PipelinesTest) getPipelineTest()).acked;
@@ -118,10 +118,10 @@
     LinkedList<NodeBytes> received = new LinkedList<NodeBytes>();
     LinkedList<NodeBytes> acked = new LinkedList<NodeBytes>();
 
-    public final ActionContainer<NodeBytes> fiCallSetNumBytes =
-      new ActionContainer<NodeBytes>();
-    public final ActionContainer<NodeBytes> fiCallSetBytesAcked =
-      new ActionContainer<NodeBytes>();
+    public final ActionContainer<NodeBytes, IOException> fiCallSetNumBytes =
+      new ActionContainer<NodeBytes, IOException>();
+    public final ActionContainer<NodeBytes, IOException> fiCallSetBytesAcked =
+      new ActionContainer<NodeBytes, IOException>();
     
     private static boolean suspend = false;
     private static long lastQueuedPacket = -1;

Propchange: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/protocol/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -1 +1 @@
-/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/protocol:817853-817863,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/protocol:817853-817863,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066

Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj?rev=893067&r1=893066&r2=893067&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj Tue Dec 22 04:44:12 2009
@@ -25,6 +25,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fi.DataTransferTestUtil;
+import org.apache.hadoop.fi.Pipeline;
 import org.apache.hadoop.fi.PipelineTest;
 import org.apache.hadoop.fi.ProbabilityModel;
 import org.apache.hadoop.fi.DataTransferTestUtil.DataTransferTest;
@@ -44,12 +45,7 @@
   public static final Log LOG = LogFactory.getLog(BlockReceiverAspects.class);
 
   pointcut callReceivePacket(BlockReceiver blockreceiver) :
-    call (* OutputStream.write(..))
-      && withincode (* BlockReceiver.receivePacket(..))
-// to further limit the application of this aspect a very narrow 'target' can be used as follows
-//  && target(DataOutputStream)
-      && !within(BlockReceiverAspects +)
-      && this(blockreceiver);
+    call(* receivePacket(..)) && target(blockreceiver);
 	
   before(BlockReceiver blockreceiver
       ) throws IOException : callReceivePacket(blockreceiver) {
@@ -67,7 +63,30 @@
     }
   }
   
-  // Pointcuts and advises for TestFiPipelines  
+  pointcut callWritePacketToDisk(BlockReceiver blockreceiver) :
+    call(* writePacketToDisk(..)) && target(blockreceiver);
+
+  before(BlockReceiver blockreceiver
+      ) throws IOException : callWritePacketToDisk(blockreceiver) {
+    LOG.info("FI: callWritePacketToDisk");
+    DataTransferTest dtTest = DataTransferTestUtil.getDataTransferTest();
+    if (dtTest != null)
+      dtTest.fiCallWritePacketToDisk.run(
+          blockreceiver.getDataNode().getDatanodeRegistration());
+  }
+
+  pointcut afterDownstreamStatusRead(BlockReceiver.PacketResponder responder):
+    call(void PipelineAck.readFields(DataInput)) && this(responder);
+
+  after(BlockReceiver.PacketResponder responder)
+      throws IOException: afterDownstreamStatusRead(responder) {
+    final DataNode d = responder.receiver.getDataNode();
+    DataTransferTest dtTest = DataTransferTestUtil.getDataTransferTest();
+    if (dtTest != null)
+      dtTest.fiAfterDownstreamStatusRead.run(d.getDatanodeRegistration());
+  }
+
+    // Pointcuts and advises for TestFiPipelines  
   pointcut callSetNumBytes(BlockReceiver br, long offset) : 
     call (void ReplicaInPipelineInterface.setNumBytes(long)) 
     && withincode (int BlockReceiver.receivePacket(long, long, boolean, int, int))

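The aspect above simplifies callReceivePacket to a direct
call(* receivePacket(..)) match and adds two hooks: one before each
writePacketToDisk call and one after the packet responder reads the
downstream ack. The committed code is code-style AspectJ (.aj); roughly the
same before-advice in annotation-style AspectJ, which compiles as plain Java,
would look like the sketch below (the class name and log text are
illustrative, not part of the commit):

    import org.aspectj.lang.annotation.Aspect;
    import org.aspectj.lang.annotation.Before;

    // Illustrative annotation-style equivalent of the new advice: run the
    // configured fault-injection action before every writePacketToDisk call.
    @Aspect
    public class WritePacketToDiskAspect {
      @Before("call(* writePacketToDisk(..)) && target(receiver)")
      public void beforeWritePacketToDisk(Object receiver) {
        // The real advice obtains DataTransferTestUtil.getDataTransferTest()
        // and runs fiCallWritePacketToDisk with the datanode's registration.
        System.out.println("FI: callWritePacketToDisk on " + receiver);
      }
    }
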
Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java?rev=893067&r1=893066&r2=893067&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiDataTransferProtocol.java Tue Dec 22 04:44:12 2009
@@ -19,6 +19,7 @@
 
 import java.io.IOException;
 
+import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fi.DataTransferTestUtil;
 import org.apache.hadoop.fi.FiTestUtil;
@@ -101,7 +102,7 @@
   }
   
   private static void runReceiverOpWriteBlockTest(String methodName,
-      int errorIndex, Action<DatanodeID> a) throws IOException {
+      int errorIndex, Action<DatanodeID, IOException> a) throws IOException {
     FiTestUtil.LOG.info("Running " + methodName + " ...");
     final DataTransferTest t = (DataTransferTest) DataTransferTestUtil
         .initTest();
@@ -113,7 +114,7 @@
   }
   
   private static void runStatusReadTest(String methodName, int errorIndex,
-      Action<DatanodeID> a) throws IOException {
+      Action<DatanodeID, IOException> a) throws IOException {
     FiTestUtil.LOG.info("Running " + methodName + " ...");
     final DataTransferTest t = (DataTransferTest) DataTransferTestUtil
         .initTest();
@@ -124,11 +125,11 @@
     Assert.assertTrue(t.isSuccess());
   }
 
-  private static void runCallReceivePacketTest(String methodName,
-      int errorIndex, Action<DatanodeID> a) throws IOException {
+  private static void runCallWritePacketToDisk(String methodName,
+      int errorIndex, Action<DatanodeID, IOException> a) throws IOException {
     FiTestUtil.LOG.info("Running " + methodName + " ...");
     final DataTransferTest t = (DataTransferTest)DataTransferTestUtil.initTest();
-    t.fiCallReceivePacket.set(a);
+    t.fiCallWritePacketToDisk.set(a);
     t.fiPipelineErrorAfterInit.set(new VerificationAction(methodName, errorIndex));
     write1byte(methodName);
     Assert.assertTrue(t.isSuccess());
@@ -280,7 +281,7 @@
   @Test
   public void pipeline_Fi_14() throws IOException {
     final String methodName = FiTestUtil.getMethodName();
-    runCallReceivePacketTest(methodName, 0, new DoosAction(methodName, 0));
+    runCallWritePacketToDisk(methodName, 0, new DoosAction(methodName, 0));
   }
 
   /**
@@ -291,7 +292,7 @@
   @Test
   public void pipeline_Fi_15() throws IOException {
     final String methodName = FiTestUtil.getMethodName();
-    runCallReceivePacketTest(methodName, 1, new DoosAction(methodName, 1));
+    runCallWritePacketToDisk(methodName, 1, new DoosAction(methodName, 1));
   }
   
   /**
@@ -302,11 +303,11 @@
   @Test
   public void pipeline_Fi_16() throws IOException {
     final String methodName = FiTestUtil.getMethodName();
-    runCallReceivePacketTest(methodName, 2, new DoosAction(methodName, 2));
+    runCallWritePacketToDisk(methodName, 2, new DoosAction(methodName, 2));
   }
 
   private static void runPipelineCloseTest(String methodName,
-      Action<DatanodeID> a) throws IOException {
+      Action<DatanodeID, IOException> a) throws IOException {
     FiTestUtil.LOG.info("Running " + methodName + " ...");
     final DataTransferTest t = (DataTransferTest) DataTransferTestUtil
         .initTest();
@@ -324,7 +325,7 @@
     final DataTransferTest t = (DataTransferTest)DataTransferTestUtil.initTest();
     final MarkerConstraint marker = new MarkerConstraint(name);
     t.fiPipelineClose.set(new DatanodeMarkingAction(name, i, marker));
-    t.fiPipelineAck.set(new ConstraintSatisfactionAction<DatanodeID>(a, marker));
+    t.fiPipelineAck.set(new ConstraintSatisfactionAction<DatanodeID, IOException>(a, marker));
     write1byte(name);
   }
 
@@ -442,7 +443,7 @@
   }
 
   private static void runBlockFileCloseTest(String methodName,
-      Action<DatanodeID> a) throws IOException {
+      Action<DatanodeID, IOException> a) throws IOException {
     FiTestUtil.LOG.info("Running " + methodName + " ...");
     final DataTransferTest t = (DataTransferTest) DataTransferTestUtil
         .initTest();

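In runPipelineCloseAck above, one hook (fiPipelineClose) sets a
MarkerConstraint and a second hook (fiPipelineAck) wraps its action in a
ConstraintSatisfactionAction gated on that marker, so the ack-stage fault
fires only after the close has been observed on the chosen datanode. A
self-contained sketch of that composition, with simplified stand-ins for the
real constraint classes:

    // Simplified stand-ins for MarkerConstraint and
    // ConstraintSatisfactionAction: the gated action is a no-op until the
    // marker has been set by the earlier pipeline stage.
    class MarkerGate {
      private volatile boolean marked = false;

      void mark() {
        marked = true;
      }

      boolean isSatisfied() {
        return marked;
      }
    }

    class GatedAction {
      private final Runnable action;
      private final MarkerGate gate;

      GatedAction(Runnable action, MarkerGate gate) {
        this.action = action;
        this.gate = gate;
      }

      // Short-circuit: do nothing until the gate has been marked.
      void run() {
        if (gate.isSatisfied()) {
          action.run();
        }
      }
    }
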
Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiPipelineClose.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiPipelineClose.java?rev=893067&r1=893066&r2=893067&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiPipelineClose.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/TestFiPipelineClose.java Tue Dec 22 04:44:12 2009
@@ -83,7 +83,7 @@
   }
 
    private static void runPipelineCloseTest(String methodName,
-      Action<DatanodeID> a) throws IOException {
+      Action<DatanodeID, IOException> a) throws IOException {
     FiTestUtil.LOG.info("Running " + methodName + " ...");
     final DataTransferTest t = (DataTransferTest) DataTransferTestUtil
         .initTest();

Propchange: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/test/hdfs:713112
 /hadoop/core/trunk/src/test/hdfs:776175-785643
 /hadoop/hdfs/branches/HDFS-265/src/test/hdfs:796829-820463
-/hadoop/hdfs/trunk/src/test/hdfs:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk/src/test/hdfs:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066

Propchange: hadoop/hdfs/branches/branch-0.21/src/webapps/datanode/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/webapps/datanode:713112
 /hadoop/core/trunk/src/webapps/datanode:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/webapps/datanode:796829-820463
-/hadoop/hdfs/trunk/src/webapps/datanode:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk/src/webapps/datanode:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066

Propchange: hadoop/hdfs/branches/branch-0.21/src/webapps/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/webapps/hdfs:713112
 /hadoop/core/trunk/src/webapps/hdfs:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/webapps/hdfs:796829-820463
-/hadoop/hdfs/trunk/src/webapps/hdfs:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk/src/webapps/hdfs:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066

Propchange: hadoop/hdfs/branches/branch-0.21/src/webapps/secondary/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Dec 22 04:44:12 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/webapps/secondary:713112
 /hadoop/core/trunk/src/webapps/secondary:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/webapps/secondary:796829-820463
-/hadoop/hdfs/trunk/src/webapps/secondary:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993
+/hadoop/hdfs/trunk/src/webapps/secondary:817853-817863,818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,831455-831490,832043,833499,835728,880971,881014,881017,884432,888084,888507,888519,889002,890655,891106,892993,893066


