hadoop-hdfs-commits mailing list archives

From: s..@apache.org
Subject: svn commit: r829184 - in /hadoop/hdfs/branches/branch-0.21: ./ src/java/org/apache/hadoop/hdfs/protocol/ src/java/org/apache/hadoop/hdfs/server/common/ src/java/org/apache/hadoop/hdfs/server/datanode/ src/java/org/apache/hadoop/hdfs/server/namenode/ sr...
Date: Fri, 23 Oct 2009 19:27:30 GMT
Author: shv
Date: Fri Oct 23 19:27:20 2009
New Revision: 829184

URL: http://svn.apache.org/viewvc?rev=829184&view=rev
Log:
HDFS-512. Merge -r 829181:829182 from trunk to BRANCH-0.21.

Modified:
    hadoop/hdfs/branches/branch-0.21/CHANGES.txt
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/protocol/Block.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/common/GenerationStamp.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlockManager.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/DatanodeDescriptor.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java
    hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
    hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCorruption.java
    hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestGetBlocks.java
    hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java

Modified: hadoop/hdfs/branches/branch-0.21/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/CHANGES.txt?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/CHANGES.txt (original)
+++ hadoop/hdfs/branches/branch-0.21/CHANGES.txt Fri Oct 23 19:27:20 2009
@@ -28,6 +28,9 @@
 
     HDFS-660. Remove deprecated methods from InterDatanodeProtocol. (shv)
 
+    HDFS-512. Block.equals() and compareTo() compare blocks based
+    only on block Ids, ignoring generation stamps. (shv)
+
   NEW FEATURES
 
     HDFS-436. Introduce AspectJ framework for HDFS code and tests.

Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/protocol/Block.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/protocol/Block.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/protocol/Block.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/protocol/Block.java Fri Oct 23 19:27:20 2009
@@ -40,10 +40,6 @@
        });
   }
 
-  // generation stamp of blocks that pre-date the introduction of
-  // a generation stamp.
-  public static final long GRANDFATHER_GENERATION_STAMP = 0;
-
   public static final Pattern blockFilePattern = Pattern
       .compile(BLOCK_FILE_PREFIX + "(-??\\d++)$");
   public static final Pattern metaFilePattern = Pattern
@@ -70,7 +66,7 @@
   public static long getGenerationStamp(String metaFile) {
     Matcher m = metaFilePattern.matcher(metaFile);
     return m.matches() ? Long.parseLong(m.group(2))
-        : GRANDFATHER_GENERATION_STAMP;
+        : GenerationStamp.GRANDFATHER_GENERATION_STAMP;
   }
 
   /**
@@ -91,9 +87,13 @@
     set(blkid, len, generationStamp);
   }
 
-  public Block(final long blkid) {this(blkid, 0, GenerationStamp.WILDCARD_STAMP);}
+  public Block(final long blkid) {
+    this(blkid, 0, GenerationStamp.GRANDFATHER_GENERATION_STAMP);
+  }
 
-  public Block(Block blk) {this(blk.blockId, blk.numBytes, blk.generationStamp);}
+  public Block(Block blk) {
+    this(blk.blockId, blk.numBytes, blk.generationStamp);
+  }
 
   /**
    * Find the blockid from the given filename
@@ -164,32 +164,13 @@
     }
   }
 
-  /////////////////////////////////////
-  // Comparable
-  /////////////////////////////////////
-  static void validateGenerationStamp(long generationstamp) {
-    if (generationstamp == GenerationStamp.WILDCARD_STAMP) {
-      throw new IllegalStateException("generationStamp (=" + generationstamp
-          + ") == GenerationStamp.WILDCARD_STAMP");
-    }    
-  }
-
-  /** {@inheritDoc} */
+  @Override // Comparable
   public int compareTo(Block b) {
-    //Wildcard generationStamp is NOT ALLOWED here
-    validateGenerationStamp(this.generationStamp);
-    validateGenerationStamp(b.generationStamp);
-
-    if (blockId < b.blockId) {
-      return -1;
-    } else if (blockId == b.blockId) {
-      return GenerationStamp.compare(generationStamp, b.generationStamp);
-    } else {
-      return 1;
-    }
+    return blockId < b.blockId ? -1 :
+           blockId > b.blockId ? 1 : 0;
   }
 
-  /** {@inheritDoc} */
+  @Override // Object
   public boolean equals(Object o) {
     if (this == o) {
       return true;
@@ -197,14 +178,10 @@
     if (!(o instanceof Block)) {
       return false;
     }
-    final Block that = (Block)o;
-    //Wildcard generationStamp is ALLOWED here
-    return this.blockId == that.blockId
-      && GenerationStamp.equalsWithWildcard(
-          this.generationStamp, that.generationStamp);
+    return compareTo((Block)o) == 0;
   }
 
-  /** {@inheritDoc} */
+  @Override // Object
   public int hashCode() {
     //GenerationStamp is IRRELEVANT and should not be used here
     return (int)(blockId^(blockId>>>32));

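Editor's note on the Block.java change above: block identity is now the block ID alone, and the generation stamp no longer participates in compareTo(), equals(), or hashCode(). A minimal sketch of the new behavior, assuming the three-argument Block(blockId, numBytes, generationStamp) constructor shown in the diff; the class name and values below are illustrative only:

    import org.apache.hadoop.hdfs.protocol.Block;

    public class BlockIdentityExample {
      public static void main(String[] args) {
        // Same block ID, different lengths and generation stamps (illustrative values).
        Block a = new Block(1073741825L, 64L, 1001L);
        Block b = new Block(1073741825L, 0L, 1002L);

        // After HDFS-512 all three comparisons ignore length and generation stamp.
        System.out.println(a.equals(b));                  // true
        System.out.println(a.compareTo(b) == 0);          // true
        System.out.println(a.hashCode() == b.hashCode()); // true
      }
    }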
Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/common/GenerationStamp.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/common/GenerationStamp.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/common/GenerationStamp.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/common/GenerationStamp.java Fri Oct 23 19:27:20 2009
@@ -17,35 +17,36 @@
  */
 package org.apache.hadoop.hdfs.server.common;
 
-import java.io.*;
-import org.apache.hadoop.io.*;
-
 /****************************************************************
  * A GenerationStamp is a Hadoop FS primitive, identified by a long.
  ****************************************************************/
-public class GenerationStamp implements WritableComparable<GenerationStamp> {
-  public static final long WILDCARD_STAMP = 1;
+public class GenerationStamp implements Comparable<GenerationStamp> {
+  /**
+   * The first valid generation stamp.
+   */
   public static final long FIRST_VALID_STAMP = 1000L;
 
-  static {                                      // register a ctor
-    WritableFactories.setFactory
-      (GenerationStamp.class,
-       new WritableFactory() {
-         public Writable newInstance() { return new GenerationStamp(0); }
-       });
-  }
+  /**
+   * Generation stamp of blocks that pre-date the introduction
+   * of a generation stamp.
+   */
+  public static final long GRANDFATHER_GENERATION_STAMP = 0;
 
-  long genstamp;
+  private volatile long genstamp;
 
   /**
    * Create a new instance, initialized to FIRST_VALID_STAMP.
    */
-  public GenerationStamp() {this(GenerationStamp.FIRST_VALID_STAMP);}
+  public GenerationStamp() {
+    this(GenerationStamp.FIRST_VALID_STAMP);
+  }
 
   /**
    * Create a new instance, initialized to the specified value.
    */
-  GenerationStamp(long stamp) {this.genstamp = stamp;}
+  GenerationStamp(long stamp) {
+    this.genstamp = stamp;
+  }
 
   /**
    * Returns the current generation stamp
@@ -69,45 +70,21 @@
     return this.genstamp;
   }
 
-  /////////////////////////////////////
-  // Writable
-  /////////////////////////////////////
-  public void write(DataOutput out) throws IOException {
-    out.writeLong(genstamp);
-  }
-
-  public void readFields(DataInput in) throws IOException {
-    this.genstamp = in.readLong();
-    if (this.genstamp < 0) {
-      throw new IOException("Bad Generation Stamp: " + this.genstamp);
-    }
-  }
-
-  /////////////////////////////////////
-  // Comparable
-  /////////////////////////////////////
-  public static int compare(long x, long y) {
-    return x < y? -1: x == y? 0: 1;
-  }
-
-  /** {@inheritDoc} */
+  @Override // Comparable
   public int compareTo(GenerationStamp that) {
-    return compare(this.genstamp, that.genstamp);
+    return this.genstamp < that.genstamp ? -1 :
+           this.genstamp > that.genstamp ? 1 : 0;
   }
 
-  /** {@inheritDoc} */
+  @Override // Object
   public boolean equals(Object o) {
     if (!(o instanceof GenerationStamp)) {
       return false;
     }
-    return genstamp == ((GenerationStamp)o).genstamp;
-  }
-
-  public static boolean equalsWithWildcard(long x, long y) {
-    return x == y || x == WILDCARD_STAMP || y == WILDCARD_STAMP;  
+    return compareTo((GenerationStamp)o) == 0;
   }
 
-  /** {@inheritDoc} */
+  @Override // Object
   public int hashCode() {
     return (int) (genstamp^(genstamp>>>32));
   }

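Editor's note: GenerationStamp drops the Writable plumbing and the wildcard stamp, and GRANDFATHER_GENERATION_STAMP moves here from Block. A hedged sketch of the fallback for meta files that pre-date generation stamps, reusing Block.getGenerationStamp() from the diff above; the file names are made up for illustration:

    import org.apache.hadoop.hdfs.protocol.Block;
    import org.apache.hadoop.hdfs.server.common.GenerationStamp;

    public class GenStampFallbackExample {
      public static void main(String[] args) {
        String withStamp   = "blk_1073741825_1001.meta"; // name carries a stamp
        String preGenStamp = "blk_1073741825.meta";      // old format, no stamp

        System.out.println(Block.getGenerationStamp(withStamp));   // 1001
        // Names without a stamp fall back to the grandfathered value (0).
        System.out.println(Block.getGenerationStamp(preGenStamp)
            == GenerationStamp.GRANDFATHER_GENERATION_STAMP);      // true
      }
    }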
Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DataBlockScanner.java Fri Oct 23 19:27:20 2009
@@ -49,6 +49,7 @@
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.server.common.GenerationStamp;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.StringUtils;
 
@@ -373,7 +374,7 @@
   static private class LogEntry {
     long blockId = -1;
     long verificationTime = -1;
-    long genStamp = Block.GRANDFATHER_GENERATION_STAMP;
+    long genStamp = GenerationStamp.GRANDFATHER_GENERATION_STAMP;
     
     /**
      * The format consists of single line with multiple entries. each 

Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java Fri Oct 23 19:27:20 2009
@@ -32,8 +32,8 @@
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.fs.FileUtil.HardLink;
-import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
+import org.apache.hadoop.hdfs.server.common.GenerationStamp;
 import org.apache.hadoop.hdfs.server.common.InconsistentFSStateException;
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
@@ -498,7 +498,7 @@
     if (matcher.matches()) {
       //return the current metadata file name
       return FSDataset.getMetaFileName(matcher.group(1),
-                                       Block.GRANDFATHER_GENERATION_STAMP); 
+          GenerationStamp.GRANDFATHER_GENERATION_STAMP); 
     }
     return oldFileName;
   }

Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java Fri Oct 23 19:27:20 2009
@@ -27,6 +27,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.server.common.GenerationStamp;
 import org.apache.hadoop.hdfs.server.datanode.FSDataset.FSVolume;
 
 /**
@@ -118,7 +119,7 @@
 
     public long getGenStamp() {
       return metaFile != null ? Block.getGenerationStamp(metaFile.getName()) :
-        Block.GRANDFATHER_GENERATION_STAMP;
+        GenerationStamp.GRANDFATHER_GENERATION_STAMP;
     }
   }
 

Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java Fri Oct 23 19:27:20 2009
@@ -58,6 +58,7 @@
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
+import org.apache.hadoop.hdfs.server.common.GenerationStamp;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.ReplicaState;
 import org.apache.hadoop.io.IOUtils;
 
@@ -174,26 +175,6 @@
       return children[ lastChildIdx ].addBlock(b, src, true, false); 
     }
 
-    /** Find the metadata file for the specified block file.
-     * Return the generation stamp from the name of the metafile.
-     */
-    long getGenerationStampFromFile(File[] listdir, File blockFile) {
-      String blockName = blockFile.getName();
-      for (int j = 0; j < listdir.length; j++) {
-        String path = listdir[j].getName();
-        if (!path.startsWith(blockName)) {
-          continue;
-        }
-        if (blockFile == listdir[j]) {
-          continue;
-        }
-        return Block.getGenerationStamp(listdir[j].getName());
-      }
-      DataNode.LOG.warn("Block " + blockFile + 
-                        " does not have a metafile!");
-      return Block.GRANDFATHER_GENERATION_STAMP;
-    }
-
     void getVolumeMap(ReplicasMap volumeMap, FSVolume volume) 
     throws IOException {
       if (children != null) {
@@ -722,7 +703,7 @@
     }
     DataNode.LOG.warn("Block " + blockFile + 
                       " does not have a metafile!");
-    return Block.GRANDFATHER_GENERATION_STAMP;
+    return GenerationStamp.GRANDFATHER_GENERATION_STAMP;
   }
 
   /** Find the corresponding meta data file from a given block file */
@@ -1799,7 +1780,7 @@
 
       final long diskGS = diskMetaFile != null && diskMetaFile.exists() ?
           Block.getGenerationStamp(diskMetaFile.getName()) :
-            Block.GRANDFATHER_GENERATION_STAMP;
+            GenerationStamp.GRANDFATHER_GENERATION_STAMP;
 
       if (diskFile == null || !diskFile.exists()) {
         if (memBlockInfo == null) {
@@ -1889,7 +1870,7 @@
           // as the block file, then use the generation stamp from it
           long gs = diskMetaFile != null && diskMetaFile.exists()
               && diskMetaFile.getParent().equals(memFile.getParent()) ? diskGS
-              : Block.GRANDFATHER_GENERATION_STAMP;
+              : GenerationStamp.GRANDFATHER_GENERATION_STAMP;
 
           DataNode.LOG.warn("Updating generation stamp for block " + blockId
               + " from " + memBlockInfo.getGenerationStamp() + " to " + gs);

Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlockManager.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlockManager.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlockManager.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlockManager.java Fri Oct 23 19:27:20 2009
@@ -36,7 +36,6 @@
 import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
-import org.apache.hadoop.hdfs.server.common.GenerationStamp;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.BlockUCState;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.ReplicaState;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem.NumberReplicas;
@@ -1000,7 +999,7 @@
                                DatanodeDescriptor node,
                                DatanodeDescriptor delNodeHint)
   throws IOException {
-    BlockInfo storedBlock = findStoredBlock(block.getBlockId());
+    BlockInfo storedBlock = blocksMap.getStoredBlock(block);
     if (storedBlock == null || storedBlock.getINode() == null) {
       // If this block does not belong to anyfile, then we are done.
       NameNode.stateChangeLog.info("BLOCK* NameSystem.addStoredBlock: "
@@ -1469,14 +1468,6 @@
     return blocksMap.getStoredBlock(block);
   }
 
-  /**
-   * Find the block by block ID.
-   */
-  BlockInfo findStoredBlock(long blockId) {
-    Block wildcardBlock = new Block(blockId, 0, GenerationStamp.WILDCARD_STAMP);
-    return blocksMap.getStoredBlock(wildcardBlock);
-  }
-
   /* updates a block in under replication queue */
   void updateNeededReplications(Block block, int curReplicasDelta,
       int expectedReplicasDelta) {
@@ -1667,19 +1658,7 @@
   void removeBlockFromMap(Block block) {
     blocksMap.removeBlock(block);
   }
-  
-  /**
-   * Update the block with the new generation stamp and new length.
-   * 
-   * @param block block
-   * @param newGS new generation stamp
-   * @param newLen new block size
-   * @return the stored block in the blocks map
-   */
-  BlockInfo updateBlock(Block block, long newGS, long newLen) {
-    return blocksMap.updateBlock(block, newGS, newLen);
-  }
-  
+
   int getCapacity() {
     synchronized(namesystem) {
       return blocksMap.getCapacity();

Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/BlocksMap.java Fri Oct 23 19:27:20 2009
@@ -86,25 +86,6 @@
   }
 
   /**
-   * Update the old block with the new generation stamp and new length.
-   * 
-   * After update, the block has a newer generation stamp so it requires 
-   * to remove the old entry first and reinsert the entry
-   * 
-   * @param block block
-   * @param newGS new generation stamp
-   * @param newLen new block size
-   * @return the stored block in the map
-   */
-  BlockInfo updateBlock(Block block, long newGS, long newLen) {
-    BlockInfo blockInfo = map.remove(block);
-    blockInfo.setGenerationStamp(newGS);
-    blockInfo.setNumBytes(newLen);
-    map.put(blockInfo, blockInfo);
-    return blockInfo;
-  }
-  
-  /**
    * Remove the block from the block map;
    * remove it from all data-node lists it belongs to;
    * and remove all data-node locations associated with the block.

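Editor's note: with the block ID as the sole key, bumping a block's generation stamp or length no longer changes its hash bucket, which is why BlocksMap.updateBlock() (remove, mutate, reinsert) can be deleted and FSNamesystem below mutates the stored block in place. A rough sketch of the idea using a plain HashMap; the map, values, and setter calls mirror Block's setNumBytes()/setGenerationStamp() and are illustrative only:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.hdfs.protocol.Block;

    public class InPlaceUpdateExample {
      public static void main(String[] args) {
        Map<Block, String> map = new HashMap<Block, String>();
        Block stored = new Block(42L, 10L, 1000L);   // illustrative block
        map.put(stored, "block metadata");

        // Mutating stamp and length in place does not move the key,
        // because hashCode()/equals() depend only on the block ID now.
        stored.setGenerationStamp(1001L);
        stored.setNumBytes(20L);

        // A lookup key with a different stamp and length still finds it.
        System.out.println(map.get(new Block(42L, 0L, 2000L)));  // block metadata
      }
    }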
Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/DatanodeDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/DatanodeDescriptor.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/DatanodeDescriptor.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/DatanodeDescriptor.java Fri Oct 23 19:27:20 2009
@@ -468,7 +468,7 @@
         + " replicaState = " + rState);
 
     // find block by blockId
-    BlockInfo storedBlock = blockManager.findStoredBlock(block.getBlockId());
+    BlockInfo storedBlock = blockManager.blocksMap.getStoredBlock(block);
     if(storedBlock == null) {
       // If blocksMap does not contain reported block id,
       // the replica should be removed from the data-node.
@@ -485,14 +485,6 @@
       switch(storedBlock.getBlockUCState()) {
       case COMPLETE:
       case COMMITTED:
-        // This is a temporary hack until Block.equals() and compareTo()
-        // are changed not to take into account the generation stamp for searching
-        // in  blocksMap
-        if(storedBlock.getGenerationStamp() != block.getGenerationStamp()) {
-          toInvalidate.add(new Block(block));
-          return storedBlock;
-        }
-
         if(storedBlock.getGenerationStamp() != block.getGenerationStamp()
             || storedBlock.getNumBytes() != block.getNumBytes())
           isCorrupt = true;

Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java Fri Oct 23 19:27:20 2009
@@ -32,6 +32,7 @@
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
+import org.apache.hadoop.hdfs.server.common.GenerationStamp;
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.NamenodeRole;
 import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
@@ -1251,7 +1252,8 @@
         blk.readFields(in);
       } else {
         oldblk.readFields(in);
-        blk.set(oldblk.blkid, oldblk.len, Block.GRANDFATHER_GENERATION_STAMP);
+        blk.set(oldblk.blkid, oldblk.len,
+                GenerationStamp.GRANDFATHER_GENERATION_STAMP);
       }
       if(isFileUnderConstruction && i == numBlocks-1)
         blocks[i] = new BlockInfoUnderConstruction(blk, replication);

Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSImage.java Fri Oct 23 19:27:20 2009
@@ -51,6 +51,7 @@
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
+import org.apache.hadoop.hdfs.server.common.GenerationStamp;
 import org.apache.hadoop.hdfs.server.common.InconsistentFSStateException;
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
@@ -1077,7 +1078,7 @@
             blocks[j] = new Block();
             if (-14 < imgVersion) {
               blocks[j].set(in.readLong(), in.readLong(), 
-                            Block.GRANDFATHER_GENERATION_STAMP);
+                            GenerationStamp.GRANDFATHER_GENERATION_STAMP);
             } else {
               blocks[j].readFields(in);
             }

Modified: hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Fri Oct 23 19:27:20 2009
@@ -3971,13 +3971,10 @@
       LOG.warn(msg);
       throw new IOException(msg);
     }
-    
-    // Remove old block from the raw map in blocks map. 
-    // This does not change any other value of the oldblockinfo
-    // This always have to be done
-    // because the generation stamp of this block is changing.
-    blockManager.updateBlock(oldBlock, 
-        newBlock.getGenerationStamp(), newBlock.getNumBytes());
+
+    // Update old block with the new generation stamp and new length
+    blockinfo.setGenerationStamp(newBlock.getGenerationStamp());
+    blockinfo.setNumBytes(newBlock.getNumBytes());
 
     // find the DatanodeDescriptor objects
     DatanodeDescriptor[] descriptors = null;

Modified: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCorruption.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCorruption.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCorruption.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestFileCorruption.java Fri Oct 23 19:27:20 2009
@@ -156,7 +156,7 @@
       return null;
     }
     long blockId = Long.parseLong(blockFileName.substring("blk_".length()));
-    long blockTimeStamp = GenerationStamp.WILDCARD_STAMP;
+    long blockTimeStamp = GenerationStamp.GRANDFATHER_GENERATION_STAMP;
     for (idx=0; idx < blocks.length; idx++) {
       String fileName = blocks[idx].getName();
       if (fileName.startsWith(blockFileName) && fileName.endsWith(".meta")) {

Modified: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestGetBlocks.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestGetBlocks.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestGetBlocks.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/TestGetBlocks.java Fri Oct 23 19:27:20 2009
@@ -145,7 +145,7 @@
     assertTrue(getException);
   }
  
-  public void testGenerationStampWildCard() {
+  public void testBlockKey() {
     Map<Block, Long> map = new HashMap<Block, Long>();
     final Random RAN = new Random();
     final long seed = RAN.nextLong();
@@ -160,7 +160,7 @@
     System.out.println("map=" + map.toString().replace(",", "\n  "));
     
     for(int i = 0; i < blkids.length; i++) {
-      Block b = new Block(blkids[i], 0, GenerationStamp.WILDCARD_STAMP);
+      Block b = new Block(blkids[i], 0, GenerationStamp.GRANDFATHER_GENERATION_STAMP);
       Long v = map.get(b);
       System.out.println(b + " => " + v);
       assertEquals(blkids[i], v.longValue());

Modified: hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java?rev=829184&r1=829183&r2=829184&view=diff
==============================================================================
--- hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java (original)
+++ hadoop/hdfs/branches/branch-0.21/src/test/hdfs/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java Fri Oct 23 19:27:20 2009
@@ -33,6 +33,7 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.server.common.GenerationStamp;
 import org.apache.hadoop.hdfs.server.datanode.FSDataset.FSVolume;
 
 import junit.framework.TestCase;
@@ -220,7 +221,7 @@
       // Test2: block metafile is missing
       long blockId = deleteMetaFile();
       scan(totalBlocks, 1, 1, 0, 0, 1);
-      verifyGenStamp(blockId, Block.GRANDFATHER_GENERATION_STAMP);
+      verifyGenStamp(blockId, GenerationStamp.GRANDFATHER_GENERATION_STAMP);
       scan(totalBlocks, 0, 0, 0, 0, 0);
 
       // Test3: block file is missing
@@ -235,7 +236,7 @@
       blockId = createBlockFile();
       totalBlocks++;
       scan(totalBlocks, 1, 1, 0, 1, 0);
-      verifyAddition(blockId, Block.GRANDFATHER_GENERATION_STAMP, 0);
+      verifyAddition(blockId, GenerationStamp.GRANDFATHER_GENERATION_STAMP, 0);
       scan(totalBlocks, 0, 0, 0, 0, 0);
 
       // Test5: A metafile exists for which there is no block file and


