hadoop-hdfs-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From szets...@apache.org
Subject svn commit: r1101675 - in /hadoop/hdfs/trunk: ./ src/test/aop/org/apache/hadoop/fi/
Date Tue, 10 May 2011 23:08:18 GMT
Author: szetszwo
Date: Tue May 10 23:08:17 2011
New Revision: 1101675

URL: http://svn.apache.org/viewvc?rev=1101675&view=rev
Log:
HDFS-1908. Fix a NullPointerException in fi.DataTransferTestUtil.

Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/FiHFlushTestUtil.java
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/Pipeline.java
    hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/PipelineTest.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=1101675&r1=1101674&r2=1101675&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Tue May 10 23:08:17 2011
@@ -526,6 +526,9 @@ Trunk (unreleased changes)
     HDFS-1827. Fix timeout problem in TestBlockReplacement.  (Matt Foley
     via szetszwo)
 
+    HDFS-1908. Fix a NullPointerException in fi.DataTransferTestUtil.
+    (szetszwo)
+
 Release 0.22.0 - Unreleased
 
   NEW FEATURES

Modified: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java?rev=1101675&r1=1101674&r2=1101675&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java (original)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/DataTransferTestUtil.java Tue May 10 23:08:17 2011
@@ -27,7 +27,6 @@ import org.apache.hadoop.fi.FiTestUtil.C
 import org.apache.hadoop.fi.FiTestUtil.CountdownConstraint;
 import org.apache.hadoop.fi.FiTestUtil.MarkerConstraint;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
-import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
 
@@ -55,7 +54,7 @@ public class DataTransferTestUtil {
    * and some actions.
    */
   public static class DataTransferTest implements PipelineTest {
-    private List<Pipeline> pipelines = new ArrayList<Pipeline>();
+    private final List<Pipeline> pipelines = new ArrayList<Pipeline>();
     private volatile boolean isSuccess = false;
 
     /** Simulate action for the receiverOpWriteBlock pointcut */
@@ -101,7 +100,8 @@ public class DataTransferTestUtil {
     }
 
     /** Initialize the pipeline. */
-    public Pipeline initPipeline(LocatedBlock lb) {
+    @Override
+    public synchronized Pipeline initPipeline(LocatedBlock lb) {
       final Pipeline pl = new Pipeline(lb);
       if (pipelines.contains(pl)) {
         throw new IllegalStateException("thepipeline != null");
@@ -110,20 +110,31 @@ public class DataTransferTestUtil {
       return pl;
     }
 
-    /** Return the pipeline. */
-    public Pipeline getPipeline(DatanodeID id) {
-      if (pipelines == null) {
-        throw new IllegalStateException("thepipeline == null");
-      }
-      StringBuilder dnString = new StringBuilder();
-      for (Pipeline pipeline : pipelines) {
-        for (DatanodeInfo dni : pipeline.getDataNodes())
-          dnString.append(dni.getStorageID());
-        if (dnString.toString().contains(id.getStorageID()))
-          return pipeline;
+    /** Return the pipeline for the datanode. */
+    @Override
+    public synchronized Pipeline getPipelineForDatanode(DatanodeID id) {
+      for (Pipeline p : pipelines) {
+        if (p.contains(id)){
+          return p;
+        }
       }
+      FiTestUtil.LOG.info("FI: pipeline not found; id=" + id
+          + ", pipelines=" + pipelines);
       return null;
     }
+
+    /**
+     * Is the test not yet success
+     * and the last pipeline contains the given datanode?
+     */
+    private synchronized boolean isNotSuccessAndLastPipelineContains(
+        int index, DatanodeID id) {
+      if (isSuccess()) {
+        return false;
+      }
+      final int n = pipelines.size();
+      return n == 0? false: pipelines.get(n-1).contains(index, id);
+    }
   }
 
   /** Action for DataNode */
@@ -171,8 +182,7 @@ public class DataTransferTestUtil {
     @Override
     public void run(DatanodeID datanodeid) throws IOException {
       final DataTransferTest test = getDataTransferTest();
-      final Pipeline p = test.getPipeline(datanodeid);
-      if (p.contains(index, datanodeid)) {
+      if (test.isNotSuccessAndLastPipelineContains(index, datanodeid)) {
         marker.mark();
       }
     }
@@ -193,8 +203,7 @@ public class DataTransferTestUtil {
     @Override
     public void run(DatanodeID id) {
       final DataTransferTest test = getDataTransferTest();
-      final Pipeline p = test.getPipeline(id);
-      if (!test.isSuccess() && p.contains(index, id)) {
+      if (test.isNotSuccessAndLastPipelineContains(index, id)) {
         final String s = toString(id);
         FiTestUtil.LOG.info(s);
         throw new OutOfMemoryError(s);
@@ -215,8 +224,8 @@ public class DataTransferTestUtil {
     @Override
     public void run(DatanodeID id) {
       final DataTransferTest test = getDataTransferTest();
-      final Pipeline p = test.getPipeline(id);
-      if (p.contains(index, id) && countdown.isSatisfied()) {
+      if (test.isNotSuccessAndLastPipelineContains(index, id)
+          && countdown.isSatisfied()) {
         final String s = toString(id);
         FiTestUtil.LOG.info(s);
         throw new OutOfMemoryError(s);
@@ -234,8 +243,7 @@ public class DataTransferTestUtil {
     @Override
     public void run(DatanodeID id) throws DiskOutOfSpaceException {
       final DataTransferTest test = getDataTransferTest();
-      final Pipeline p = test.getPipeline(id);
-      if (p.contains(index, id)) {
+      if (test.isNotSuccessAndLastPipelineContains(index, id)) {
         final String s = toString(id);
         FiTestUtil.LOG.info(s);
         throw new DiskOutOfSpaceException(s);
@@ -256,8 +264,7 @@ public class DataTransferTestUtil {
     @Override
     public void run(DatanodeID id) throws IOException {
       final DataTransferTest test = getDataTransferTest();
-      final Pipeline p = test.getPipeline(id);
-      if (p.contains(index, id)) {
+      if (test.isNotSuccessAndLastPipelineContains(index, id)) {
         final String s = toString(id);
         FiTestUtil.LOG.info(s);
         throw new IOException(s);
@@ -284,8 +291,8 @@ public class DataTransferTestUtil {
     @Override
     public void run(DatanodeID id) throws DiskOutOfSpaceException {
       final DataTransferTest test = getDataTransferTest();
-      final Pipeline p = test.getPipeline(id);
-      if (p.contains(index, id) && countdown.isSatisfied()) {
+      if (test.isNotSuccessAndLastPipelineContains(index, id)
+          && countdown.isSatisfied()) {
         final String s = toString(id);
         FiTestUtil.LOG.info(s);
         throw new DiskOutOfSpaceException(s);
@@ -339,8 +346,7 @@ public class DataTransferTestUtil {
     @Override
     public void run(DatanodeID id) {
       final DataTransferTest test = getDataTransferTest();
-      final Pipeline p = test.getPipeline(id);
-      if (!test.isSuccess() && p.contains(index, id)) {
+      if (test.isNotSuccessAndLastPipelineContains(index, id)) {
         FiTestUtil.LOG.info(toString(id));
         if (maxDuration <= 0) {
           for(; FiTestUtil.sleep(1000); ); //sleep forever until interrupt
@@ -385,8 +391,8 @@ public class DataTransferTestUtil {
     @Override
     public void run(DatanodeID id) {
       final DataTransferTest test = getDataTransferTest();
-      final Pipeline p = test.getPipeline(id);
-      if (p.contains(index, id) && countdown.isSatisfied()) {
+      if (test.isNotSuccessAndLastPipelineContains(index, id)
+          && countdown.isSatisfied()) {
         final String s = toString(id) + ", duration = ["
         + minDuration + "," + maxDuration + ")";
         FiTestUtil.LOG.info(s);

Modified: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/FiHFlushTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/FiHFlushTestUtil.java?rev=1101675&r1=1101674&r2=1101675&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/FiHFlushTestUtil.java (original)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/FiHFlushTestUtil.java Tue May 10 23:08:17 2011
@@ -44,9 +44,8 @@ public class FiHFlushTestUtil extends Da
 
     /** {@inheritDoc} */
     public void run(DatanodeID id) throws IOException {
-      final Pipeline p = getPipelineTest().getPipeline(id);
+      final Pipeline p = getPipelineTest().getPipelineForDatanode(id);
       if (p == null) {
-        FiTestUtil.LOG.info("FI: couldn't find a pipeline for " + id);
         return;
       }
       if (p.contains(index, id)) {

Modified: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/Pipeline.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/Pipeline.java?rev=1101675&r1=1101674&r2=1101675&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/Pipeline.java (original)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/Pipeline.java Tue May 10 23:08:17 2011
@@ -26,13 +26,16 @@ import java.util.ArrayList;
 
 public class Pipeline {
   private final List<String> datanodes = new ArrayList<String>();
-  private DatanodeInfo[] nodes;
 
   Pipeline(LocatedBlock lb) {
     for(DatanodeInfo d : lb.getLocations()) {
       datanodes.add(d.getName());
     }
-    nodes = lb.getLocations();
+  }
+
+  /** Does the pipeline contains d? */
+  public boolean contains(DatanodeID d) {
+    return datanodes.contains(d.getName());
   }
 
   /** Does the pipeline contains d at the n th position? */
@@ -40,12 +43,7 @@ public class Pipeline {
     return d.getName().equals(datanodes.get(n));
   }
 
-  /** Returns DatanodeInfo[] of the nodes of the constructed pipiline*/
-  public DatanodeInfo[] getDataNodes () {
-    return nodes;
-  }
-
-  /** {@inheritDoc} */
+  @Override
   public String toString() {
     return getClass().getSimpleName() + datanodes;
   }

Modified: hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/PipelineTest.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/PipelineTest.java?rev=1101675&r1=1101674&r2=1101675&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/PipelineTest.java (original)
+++ hadoop/hdfs/trunk/src/test/aop/org/apache/hadoop/fi/PipelineTest.java Tue May 10 23:08:17 2011
@@ -23,5 +23,5 @@ import org.apache.hadoop.hdfs.protocol.L
 /** A pipeline contains a list of datanodes. */
 public interface PipelineTest {
   public Pipeline initPipeline(LocatedBlock lb);
-  public Pipeline getPipeline(DatanodeID id);
+  public Pipeline getPipelineForDatanode(DatanodeID id);
 }



Mime
View raw message