hadoop-hdfs-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From c..@apache.org
Subject svn commit: r832049 - in /hadoop/hdfs/branches/branch-0.21: ./ .eclipse.templates/.launches/ src/contrib/block_forensics/ src/contrib/hdfsproxy/ src/java/ src/java/org/apache/hadoop/hdfs/protocol/ src/java/org/apache/hadoop/hdfs/server/datanode/ src/te...
Date Mon, 02 Nov 2009 18:56:46 GMT
Author: cos
Date: Mon Nov  2 18:56:45 2009
New Revision: 832049

URL: http://svn.apache.org/viewvc?rev=832049&view=rev
Log:
HDFS-521. svn merge -c 832043 from trunk to branch-0.21 (cos)

Added:
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java
      - copied unchanged from r832043, hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/PipelinesTestUtil.java
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/TestFiPipelines.java
      - copied unchanged from r832043, hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/TestFiPipelines.java
    hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestPipelines.java
      - copied unchanged from r832043, hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestPipelines.java
Modified:
    hadoop/hdfs/branches/branch-0.21/   (props changed)
    hadoop/hdfs/branches/branch-0.21/.eclipse.templates/.launches/   (props changed)
    hadoop/hdfs/branches/branch-0.21/CHANGES.txt
    hadoop/hdfs/branches/branch-0.21/build.xml   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/contrib/block_forensics/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/contrib/hdfsproxy/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/java/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/protocol/RecoveryInProgressException.java   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/protocol/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
    hadoop/hdfs/branches/branch-0.21/src/test/hdfs/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestReadWhileWriting.java
    hadoop/hdfs/branches/branch-0.21/src/webapps/datanode/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/webapps/hdfs/   (props changed)
    hadoop/hdfs/branches/branch-0.21/src/webapps/secondary/   (props changed)

Propchange: hadoop/hdfs/branches/branch-0.21/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Nov  2 18:56:45 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/hdfs:713112
 /hadoop/hdfs/branches/HDFS-265:796829-820463
-/hadoop/hdfs/trunk:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436
+/hadoop/hdfs/trunk:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,832043

Propchange: hadoop/hdfs/branches/branch-0.21/.eclipse.templates/.launches/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Nov  2 18:56:45 2009
@@ -1 +1 @@
-/hadoop/hdfs/trunk/.eclipse.templates/.launches:824552,824944,826149,828116,828926,829880,829894,830003,831436
+/hadoop/hdfs/trunk/.eclipse.templates/.launches:824552,824944,826149,828116,828926,829880,829894,830003,831436,832043

Modified: hadoop/hdfs/branches/branch-0.21/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/CHANGES.txt?rev=832049&r1=832048&r2=832049&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/CHANGES.txt (original)
+++ hadoop/hdfs/branches/branch-0.21/CHANGES.txt Mon Nov  2 18:56:45 2009
@@ -294,6 +294,8 @@
     HDFS-736. commitBlockSynchronization() updates block GS and length in-place.
     (shv)
 
+    HDFS-521. Create new tests for pipeline (cos)
+
   BUG FIXES
 
     HDFS-76. Better error message to users when commands fail because of 

Propchange: hadoop/hdfs/branches/branch-0.21/build.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Nov  2 18:56:45 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/build.xml:713112
 /hadoop/core/trunk/build.xml:779102
 /hadoop/hdfs/branches/HDFS-265/build.xml:796829-820463
-/hadoop/hdfs/trunk/build.xml:818294-818298,824552,824944,825229,826149,828116,828926,829258,829880,829894,830003,831436
+/hadoop/hdfs/trunk/build.xml:818294-818298,824552,824944,825229,826149,828116,828926,829258,829880,829894,830003,831436,832043

Propchange: hadoop/hdfs/branches/branch-0.21/src/contrib/block_forensics/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Nov  2 18:56:45 2009
@@ -1 +1 @@
-/hadoop/hdfs/trunk/src/contrib/block_forensics:824552,824944,826149,828116,828926,829880,829894,830003,831436
+/hadoop/hdfs/trunk/src/contrib/block_forensics:824552,824944,826149,828116,828926,829880,829894,830003,831436,832043

Propchange: hadoop/hdfs/branches/branch-0.21/src/contrib/hdfsproxy/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Nov  2 18:56:45 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/contrib/hdfsproxy:713112
 /hadoop/core/trunk/src/contrib/hdfsproxy:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/contrib/hdfsproxy:796829-820463
-/hadoop/hdfs/trunk/src/contrib/hdfsproxy:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436
+/hadoop/hdfs/trunk/src/contrib/hdfsproxy:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,832043

Propchange: hadoop/hdfs/branches/branch-0.21/src/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Nov  2 18:56:45 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/java:713112
 /hadoop/core/trunk/src/hdfs:776175-785643,785929-786278
 /hadoop/hdfs/branches/HDFS-265/src/java:796829-820463
-/hadoop/hdfs/trunk/src/java:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436
+/hadoop/hdfs/trunk/src/java:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,832043

Propchange: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/protocol/RecoveryInProgressException.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Nov  2 18:56:45 2009
@@ -1,5 +1,5 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/java/org/apache/hadoop/hdfs/server/datanode/RecoveryInProgressException.java:713112
 /hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/RecoveryInProgressException.java:776175-785643,785929-786278
 /hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/RecoveryInProgressException.java:817353-818319,818321-818553
-/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/RecoveryInProgressException.java:824552,824944,826149,828116,828926,829880,829894,830003,831436
+/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/RecoveryInProgressException.java:824552,824944,826149,828116,828926,829880,829894,830003,831436,832043
 /hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/RecoveryInProgressException.java:796829-800617,800619-803337,804756-805652,808672-809439,811495-813103,813105-813630,814223-815964,818294-818298

Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java?rev=832049&r1=832048&r2=832049&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
(original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
Mon Nov  2 18:56:45 2009
@@ -843,8 +843,7 @@
               lastPacket = true;
             }
 
-            replyOut.writeLong(expected);
-            SUCCESS.write(replyOut);
+            ackReply(expected);
             replyOut.flush();
             // remove the packet from the ack queue
             removeAckHead();
@@ -871,6 +870,14 @@
                " for block " + block + " terminating");
     }
 
+    // This method is introduced to facilitate testing. Otherwise
+    // there was a little chance to bind an AspectJ advice to such a sequence
+    // of calls
+    private void ackReply(long expected) throws IOException {
+      replyOut.writeLong(expected);
+      SUCCESS.write(replyOut);
+    }
+
     /**
      * Thread to process incoming acks.
      * @see java.lang.Runnable#run()
@@ -984,8 +991,7 @@
             }
 
             // send my status back to upstream datanode
-            replyOut.writeLong(expected); // send seqno upstream
-            SUCCESS.write(replyOut);
+            ackReply(expected);
 
             LOG.debug("PacketResponder " + numTargets + 
                       " for block " + block +

Propchange: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Nov  2 18:56:45 2009
@@ -3,4 +3,4 @@
 /hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeBlockInfo.java:776175-785643,785929-786278
 /hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:776175-785643,785929-786278
 /hadoop/hdfs/branches/HDFS-265/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:796829-820463
-/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436
+/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,832043

Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj?rev=832049&r1=832048&r2=832049&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj
(original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/DFSClientAspects.aj
Mon Nov  2 18:56:45 2009
@@ -22,13 +22,15 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fi.DataTransferTestUtil;
+import org.apache.hadoop.fi.PipelineTest;
 import org.apache.hadoop.fi.DataTransferTestUtil.DataTransferTest;
 import org.apache.hadoop.hdfs.DFSClient.DFSOutputStream;
 import org.apache.hadoop.hdfs.DFSClient.DFSOutputStream.DataStreamer;
+import org.apache.hadoop.hdfs.PipelinesTestUtil.PipelinesTest;
 import org.junit.Assert;
 
 /** Aspects for DFSClient */
-public aspect DFSClientAspects {
+privileged public aspect DFSClientAspects {
   public static final Log LOG = LogFactory.getLog(DFSClientAspects.class);
 
   pointcut callCreateBlockOutputStream(DataStreamer datastreamer):
@@ -93,4 +95,19 @@
   before(DFSOutputStream out) : pipelineClose(out) {
     LOG.info("FI: before pipelineClose:");
   }
+
+  pointcut checkAckQueue(DFSClient.DFSOutputStream.Packet cp):
+    call (void DFSClient.DFSOutputStream.waitAndQueuePacket(
+            DFSClient.DFSOutputStream.Packet))
+    && withincode (void DFSClient.DFSOutputStream.writeChunk(..))
+    && args(cp);
+
+  after(DFSClient.DFSOutputStream.Packet cp) : checkAckQueue (cp) {
+    PipelineTest pTest = DataTransferTestUtil.getDataTransferTest();
+    if (pTest != null && pTest instanceof PipelinesTest) {
+      LOG.debug("FI: Recording packet # " + cp.seqno
+          + " where queuing has occurred");
+      ((PipelinesTest) pTest).setVerified(cp.seqno);
+    }
+  }
 }

Propchange: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/protocol/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Nov  2 18:56:45 2009
@@ -1 +1 @@
-/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/protocol:824552,824944,826149,828116,828926,829880,829894,830003,831436
+/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/hdfs/protocol:824552,824944,826149,828116,828926,829880,829894,830003,831436,832043

Modified: hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj?rev=832049&r1=832048&r2=832049&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
(original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/aop/org/apache/hadoop/hdfs/server/datanode/BlockReceiverAspects.aj
Mon Nov  2 18:56:45 2009
@@ -24,8 +24,12 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fi.DataTransferTestUtil;
+import org.apache.hadoop.fi.PipelineTest;
 import org.apache.hadoop.fi.ProbabilityModel;
 import org.apache.hadoop.fi.DataTransferTestUtil.DataTransferTest;
+import org.apache.hadoop.hdfs.server.datanode.BlockReceiver.PacketResponder;
+import org.apache.hadoop.hdfs.PipelinesTestUtil.PipelinesTest;
+import org.apache.hadoop.hdfs.PipelinesTestUtil.NodeBytes;
 import org.apache.hadoop.hdfs.protocol.DataTransferProtocol.Status;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
@@ -34,7 +38,7 @@
  * This aspect takes care about faults injected into datanode.BlockReceiver 
  * class 
  */
-public privileged aspect BlockReceiverAspects {
+privileged public aspect BlockReceiverAspects {
   public static final Log LOG = LogFactory.getLog(BlockReceiverAspects.class);
 
   pointcut callReceivePacket(BlockReceiver blockreceiver) :
@@ -60,6 +64,107 @@
         thisJoinPoint.getStaticPart( ).getSourceLocation());
     }
   }
+  
+  // Pointcuts and advises for TestFiPipelines  
+  pointcut callSetNumBytes(BlockReceiver br, long offset) : 
+    call (void ReplicaInPipelineInterface.setNumBytes(long)) 
+    && withincode (int BlockReceiver.receivePacket(long, long, boolean, int, int))
+    && args(offset) 
+    && this(br);
+  
+  after(BlockReceiver br, long offset) : callSetNumBytes(br, offset) {
+    LOG.debug("FI: Received bytes To: " + br.datanode.dnRegistration.getStorageID() + ": " + offset);
+    PipelineTest pTest = DataTransferTestUtil.getDataTransferTest();
+    if (pTest == null) {
+      LOG.debug("FI: no pipeline has been found in receiving");
+      return;
+    }
+    if (!(pTest instanceof PipelinesTest)) {
+      return;
+    }
+    NodeBytes nb = new NodeBytes(br.datanode.dnRegistration, offset);
+    try {
+      ((PipelinesTest)pTest).fiCallSetNumBytes.run(nb);
+    } catch (IOException e) {
+      LOG.fatal("FI: no exception is expected here!");
+    }
+  }
+  
+  // Pointcuts and advises for TestFiPipelines  
+  pointcut callSetBytesAcked(PacketResponder pr, long acked) : 
+    call (void ReplicaInPipelineInterface.setBytesAcked(long)) 
+    && withincode (void PacketResponder.run())
+    && args(acked) 
+    && this(pr);
+
+  pointcut callSetBytesAckedLastDN(PacketResponder pr, long acked) : 
+    call (void ReplicaInPipelineInterface.setBytesAcked(long)) 
+    && withincode (void PacketResponder.lastDataNodeRun())
+    && args(acked) 
+    && this(pr);
+  
+  after (PacketResponder pr, long acked) : callSetBytesAcked (pr, acked) {
+    PipelineTest pTest = DataTransferTestUtil.getDataTransferTest();
+    if (pTest == null) {
+      LOG.debug("FI: no pipeline has been found in acking");
+      return;
+    }
+    LOG.debug("FI: Acked total bytes from: " + 
+        pr.receiver.datanode.dnRegistration.getStorageID() + ": " + acked);
+    if (pTest instanceof PipelinesTest) {
+      bytesAckedService((PipelinesTest)pTest, pr, acked);
+    }
+  }
+  after (PacketResponder pr, long acked) : callSetBytesAckedLastDN (pr, acked) {
+    PipelineTest pTest = DataTransferTestUtil.getDataTransferTest();
+    if (pTest == null) {
+      LOG.debug("FI: no pipeline has been found in acking");
+      return;
+    }
+    LOG.debug("FI: Acked total bytes from (last DN): " + 
+        pr.receiver.datanode.dnRegistration.getStorageID() + ": " + acked);
+    if (pTest instanceof PipelinesTest) {
+      bytesAckedService((PipelinesTest)pTest, pr, acked); 
+    }
+  }
+  
+  private void bytesAckedService 
+      (final PipelinesTest pTest, final PacketResponder pr, final long acked) {
+    NodeBytes nb = new NodeBytes(pr.receiver.datanode.dnRegistration, acked);
+    try {
+      pTest.fiCallSetBytesAcked.run(nb);
+    } catch (IOException e) {
+      LOG.fatal("No exception should be happening at this point");
+      assert false;
+    }
+  }
+  
+  pointcut preventAckSending () :
+    call (void ackReply(long)) 
+    && within (PacketResponder);
+
+  static int ackCounter = 0;
+  void around () : preventAckSending () {
+    PipelineTest pTest = DataTransferTestUtil.getDataTransferTest();
+
+    if (pTest == null) { 
+      LOG.debug("FI: remove first ack as expected");
+      proceed();
+      return;
+    }
+    if (!(pTest instanceof PipelinesTest)) {
+      LOG.debug("FI: remove first ack as expected");
+      proceed();
+      return;
+    }
+    if (((PipelinesTest)pTest).getSuspend()) {
+        LOG.debug("FI: suspend the ack");
+        return;
+    }
+    LOG.debug("FI: remove first ack as expected");
+    proceed();
+  }
+  // End of pointcuts and advises for TestFiPipelines  
 
   pointcut pipelineClose(BlockReceiver blockreceiver, long offsetInBlock, long seqno,
       boolean lastPacketInBlock, int len, int endOfHeader) :

Propchange: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Nov  2 18:56:45 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/test/hdfs:713112
 /hadoop/core/trunk/src/test/hdfs:776175-785643
 /hadoop/hdfs/branches/HDFS-265/src/test/hdfs:796829-820463
-/hadoop/hdfs/trunk/src/test/hdfs:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436
+/hadoop/hdfs/trunk/src/test/hdfs:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,832043

Modified: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestReadWhileWriting.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestReadWhileWriting.java?rev=832049&r1=832048&r2=832049&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestReadWhileWriting.java
(original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestReadWhileWriting.java
Mon Nov  2 18:56:45 2009
@@ -17,10 +17,6 @@
  */
 package org.apache.hadoop.hdfs;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -28,13 +24,16 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
 /** Test reading from hdfs while a file is being written. */
 public class TestReadWhileWriting {
   {
@@ -48,10 +47,10 @@
   
   /** Test reading while writing. */
   @Test
-  public void testReadWhileWriting() throws Exception {
+  public void pipeline_02_03() throws Exception {
     final Configuration conf = new HdfsConfiguration();
     //enable append
-    conf.setBoolean("dfs.support.append", true);
+    conf.setBoolean(DFSConfigKeys.DFS_SUPPORT_APPEND_KEY, true);
 
     // create cluster
     final MiniDFSCluster cluster = new MiniDFSCluster(conf, 3, true, null);

Propchange: hadoop/hdfs/branches/branch-0.21/src/webapps/datanode/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Nov  2 18:56:45 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/webapps/datanode:713112
 /hadoop/core/trunk/src/webapps/datanode:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/webapps/datanode:796829-820463
-/hadoop/hdfs/trunk/src/webapps/datanode:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436
+/hadoop/hdfs/trunk/src/webapps/datanode:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,832043

Propchange: hadoop/hdfs/branches/branch-0.21/src/webapps/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Nov  2 18:56:45 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/webapps/hdfs:713112
 /hadoop/core/trunk/src/webapps/hdfs:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/webapps/hdfs:796829-820463
-/hadoop/hdfs/trunk/src/webapps/hdfs:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436
+/hadoop/hdfs/trunk/src/webapps/hdfs:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,832043

Propchange: hadoop/hdfs/branches/branch-0.21/src/webapps/secondary/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Nov  2 18:56:45 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/branch-0.19/hdfs/src/webapps/secondary:713112
 /hadoop/core/trunk/src/webapps/secondary:776175-784663
 /hadoop/hdfs/branches/HDFS-265/src/webapps/secondary:796829-820463
-/hadoop/hdfs/trunk/src/webapps/secondary:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436
+/hadoop/hdfs/trunk/src/webapps/secondary:818294-818298,824552,824944,826149,828116,828926,829880,829894,830003,831436,832043



Mime
View raw message