hadoop-common-commits mailing list archives

From: omal...@apache.org
Subject: svn commit: r1077664 - in /hadoop/common/branches/branch-0.20-security-patches/src: hdfs/org/apache/hadoop/hdfs/protocol/ hdfs/org/apache/hadoop/hdfs/security/token/block/ hdfs/org/apache/hadoop/hdfs/server/namenode/ hdfs/org/apache/hadoop/hdfs/server/...
Date: Fri, 04 Mar 2011 04:41:47 GMT
Author: omalley
Date: Fri Mar  4 04:41:47 2011
New Revision: 1077664

URL: http://svn.apache.org/viewvc?rev=1077664&view=rev
Log:
commit 736db26d01507ea4ab34f327aa4c7b8cff89d139
Author: Jakob Homan <jhoman@yahoo-inc.com>
Date:   Tue Sep 7 10:39:11 2010 -0700

    HDFS-1353 from https://issues.apache.org/jira/secure/attachment/12453823/HDFS-1353-y20.patch
    
    +++ b/YAHOO-CHANGES.txt
    +
    +    HDFS-1353. Remove most of getBlockLocation optimization (jghoman)
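
Editorial note on the shape of the change: instead of packing every block ID of a
getBlockLocations response into one shared BlockTokenIdentifier, the namenode now
mints one READ token per LocatedBlock. A minimal sketch of that pattern follows
(hypothetical helper name; the committed code is inlined in FSNamesystem below):

    // Sketch only, not the committed code: one READ token per located block.
    static void attachReadTokens(List<LocatedBlock> results,
        BlockTokenSecretManager accessTokenHandler) throws IOException {
      for (LocatedBlock b : results) {
        b.setBlockToken(accessTokenHandler.generateToken(b.getBlock(),
            EnumSet.of(BlockTokenSecretManager.AccessMode.READ)));
      }
    }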

Modified:
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java?rev=1077664&r1=1077663&r2=1077664&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java Fri Mar  4 04:41:47 2011
@@ -50,12 +50,9 @@ public interface ClientProtocol extends 
    * Compared to the previous version the following changes have been introduced:
    * (Only the latest change is reflected.
    * The log of historical changes can be retrieved from the svn).
-   * 61: Serialized format of BlockTokenIdentifier changed to contain
-   *     multiple blocks within a single BlockTokenIdentifier 
-   *     
-   *     (bumped to 61 to bring in line with trunk)
+   * 62: Remove getBlockLocations optimization
    */
-  public static final long versionID = 61L;
+  public static final long versionID = 62L;
   
   ///////////////////////////////////////
   // File contents
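
What the bump to 62L buys, for readers new to Hadoop RPC: client and server compare
this constant before any call is allowed, so mismatched builds fail fast. A hedged
illustration (the namenode proxy is hypothetical, and the real comparison happens
inside the RPC machinery rather than in user code):

    // Illustrative only; getProtocolVersion() comes from VersionedProtocol,
    // which ClientProtocol extends.
    static void checkVersion(ClientProtocol namenode) throws IOException {
      long serverVersion = namenode.getProtocolVersion(
          ClientProtocol.class.getName(), ClientProtocol.versionID);
      if (serverVersion != ClientProtocol.versionID) {
        throw new IOException("ClientProtocol version mismatch: client="
            + ClientProtocol.versionID + ", server=" + serverVersion);
      }
    }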

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java?rev=1077664&r1=1077663&r2=1077664&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java Fri Mar  4 04:41:47 2011
@@ -21,7 +21,6 @@ package org.apache.hadoop.hdfs.security.
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.util.Arrays;
 import java.util.EnumSet;
 
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager.AccessMode;
@@ -36,23 +35,20 @@ public class BlockTokenIdentifier extend
   private long expiryDate;
   private int keyId;
   private String userId;
-  private long [] blockIds;
+  private long blockId;
   private EnumSet<AccessMode> modes;
 
   private byte [] cache;
   
   public BlockTokenIdentifier() {
-    this(null, new long [] {}, EnumSet.noneOf(AccessMode.class));
+    this(null, 0l, EnumSet.noneOf(AccessMode.class));
   }
 
-  public BlockTokenIdentifier(String userId, long [] blockIds,
+  public BlockTokenIdentifier(String userId, long blockId,
       EnumSet<AccessMode> modes) {
-    if(blockIds == null) 
-      throw new IllegalArgumentException("blockIds can't be null");
     this.cache = null;
     this.userId = userId;
-    this.blockIds = Arrays.copyOf(blockIds, blockIds.length);
-    Arrays.sort(this.blockIds);
+    this.blockId  = blockId;
     this.modes = modes == null ? EnumSet.noneOf(AccessMode.class) : modes;
   }
 
@@ -64,7 +60,7 @@ public class BlockTokenIdentifier extend
   @Override
   public UserGroupInformation getUser() {
     if (userId == null || "".equals(userId)) {
-      return UserGroupInformation.createRemoteUser(Arrays.toString(blockIds));
+      return UserGroupInformation.createRemoteUser(Long.toString(blockId));
     }
     return UserGroupInformation.createRemoteUser(userId);
   }
@@ -91,25 +87,8 @@ public class BlockTokenIdentifier extend
     return userId;
   }
 
-  /**
-   * Return sorted array of blockIds this {@link BlockTokenIdentifier} includes
-   */
-  public long [] getBlockIds() {
-    return blockIds;
-  }
-  
-  /**
-   * Is specified blockId included in this BlockTokenIdentifier?
-   */
-  public boolean isBlockIncluded(long blockId) {
-    switch(blockIds.length) {
-    case 1:
-      return blockIds[0] == blockId;
-    case 2:
-      return (blockIds[0] == blockId) || (blockIds[1] == blockId);
-    default:
-      return Arrays.binarySearch(blockIds, blockId) >= 0;  
-    }
+  public long getBlockId() {
+    return blockId;
   }
 
   public EnumSet<AccessMode> getAccessModes() {
@@ -120,7 +99,7 @@ public class BlockTokenIdentifier extend
   public String toString() {
     return "block_token_identifier (expiryDate=" + this.getExpiryDate()
         + ", keyId=" + this.getKeyId() + ", userId=" + this.getUserId()
-        + ", blockIds=" + Arrays.toString(blockIds) + ", access modes="
+        + ", blockIds=" + blockId + ", access modes="
         + this.getAccessModes() + ")";
   }
 
@@ -137,7 +116,7 @@ public class BlockTokenIdentifier extend
       BlockTokenIdentifier that = (BlockTokenIdentifier) obj;
       return this.expiryDate == that.expiryDate && this.keyId == that.keyId
           && isEqual(this.userId, that.userId) 
-          && Arrays.equals(this.blockIds, that.blockIds)
+          && this.blockId == that.blockId
           && isEqual(this.modes, that.modes);
     }
     return false;
@@ -145,7 +124,7 @@ public class BlockTokenIdentifier extend
 
   /** {@inheritDoc} */
   public int hashCode() {
-    return (int) expiryDate ^ keyId ^ Arrays.hashCode(blockIds) ^ modes.hashCode()
+    return (int) expiryDate ^ keyId ^ (int)blockId ^ modes.hashCode()
         ^ (userId == null ? 0 : userId.hashCode());
   }
 
@@ -154,9 +133,7 @@ public class BlockTokenIdentifier extend
     expiryDate = WritableUtils.readVLong(in);
     keyId = WritableUtils.readVInt(in);
     userId = WritableUtils.readString(in);
-    blockIds = new long[WritableUtils.readVInt(in)];
-    for(int i = 0; i < blockIds.length; i++)
-      blockIds[i] = WritableUtils.readVLong(in);
+    blockId = WritableUtils.readVLong(in);
     int length = WritableUtils.readVInt(in);
     for (int i = 0; i < length; i++) {
       modes.add(WritableUtils.readEnum(in, AccessMode.class));
@@ -167,9 +144,7 @@ public class BlockTokenIdentifier extend
     WritableUtils.writeVLong(out, expiryDate);
     WritableUtils.writeVInt(out, keyId);
     WritableUtils.writeString(out, userId);
-    WritableUtils.writeVInt(out, blockIds.length);
-    for(int i = 0; i < blockIds.length; i++)
-      WritableUtils.writeVLong(out, blockIds[i]);
+    WritableUtils.writeVLong(out, blockId);
     WritableUtils.writeVInt(out, modes.size());
     for (AccessMode aMode : modes) {
       WritableUtils.writeEnum(out, aMode);
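
With the array of block IDs gone, the identifier serializes a single VLong. A hedged
round-trip sketch, assuming the patched BlockTokenIdentifier from this branch is on
the classpath:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.util.EnumSet;

    import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
    import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager.AccessMode;

    // Sketch, not part of the commit: write and read back a single-block identifier.
    public class BlockTokenIdentifierSketch {
      public static void main(String[] args) throws Exception {
        BlockTokenIdentifier original = new BlockTokenIdentifier(
            "hdfs", 42L, EnumSet.of(AccessMode.READ));

        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        original.write(new DataOutputStream(buf));

        BlockTokenIdentifier copy = new BlockTokenIdentifier();
        copy.readFields(new DataInputStream(
            new ByteArrayInputStream(buf.toByteArray())));

        // One block id per identifier now; equals() compares the long directly.
        System.out.println(copy.getBlockId() == 42L && copy.equals(original));
      }
    }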

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java?rev=1077664&r1=1077663&r2=1077664&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java Fri Mar  4 04:41:47 2011
@@ -174,29 +174,16 @@ public class BlockTokenSecretManager ext
   /** Generate an block token for current user */
   public Token<BlockTokenIdentifier> generateToken(Block block,
       EnumSet<AccessMode> modes) throws IOException {
-    return generateToken(new long [] { block.getBlockId() }, modes);
+    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+    String userID = (ugi == null ? null : ugi.getShortUserName());
+    return generateToken(userID, block, modes);
   }
 
   /** Generate a block token for a specified user */
   public Token<BlockTokenIdentifier> generateToken(String userId, Block block,
       EnumSet<AccessMode> modes) throws IOException {
-    return generateToken(userId, new long [] { block.getBlockId() }, modes);
-  }
-
-  /** Generate a block token for the current user based on a collection
-   * of blockIds
-   */
-  public Token<BlockTokenIdentifier> generateToken(long[] blockIds,
-      EnumSet<AccessMode> modes) throws IOException {
-    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-    String userID = (ugi == null ? null : ugi.getShortUserName());
-    return generateToken(userID, blockIds, modes);
-  }
-  
-  /** Generate a block token based on a collection of blockIds */
-  public Token<BlockTokenIdentifier> generateToken(String userID,
-      long[] blockIds, EnumSet<AccessMode> modes) {
-    BlockTokenIdentifier id = new BlockTokenIdentifier(userID, blockIds, modes);
+    BlockTokenIdentifier id = new BlockTokenIdentifier(userId, 
+        block.getBlockId(), modes);
     return new Token<BlockTokenIdentifier>(id, this);
   }
 
@@ -215,7 +202,7 @@ public class BlockTokenSecretManager ext
       throw new InvalidToken("Block token with " + id.toString()
           + " doesn't belong to user " + userId);
     }
-    if (!id.isBlockIncluded(block.getBlockId())) {
+    if (id.getBlockId() != block.getBlockId()) {
       throw new InvalidToken("Block token with " + id.toString()
           + " doesn't apply to block " + block);
     }
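
On the manager side, minting and checking a token is now strictly per block. A hedged
sketch; the (isMaster, keyUpdateInterval, tokenLifetime) constructor arguments are an
assumption about the master-side secret manager in this branch, not something this
patch touches:

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;
    import java.util.EnumSet;

    import org.apache.hadoop.hdfs.protocol.Block;
    import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
    import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager;
    import org.apache.hadoop.security.token.Token;

    public class PerBlockTokenSketch {
      public static void mintAndCheck() throws IOException {
        // Assumed constructor: master mode, 10-minute key roll and token lifetime.
        BlockTokenSecretManager sm =
            new BlockTokenSecretManager(true, 10 * 60 * 1000, 10 * 60 * 1000);
        Block block = new Block(42L);

        Token<BlockTokenIdentifier> token = sm.generateToken(block,
            EnumSet.of(BlockTokenSecretManager.AccessMode.READ));

        BlockTokenIdentifier id = new BlockTokenIdentifier();
        id.readFields(new DataInputStream(
            new ByteArrayInputStream(token.getIdentifier())));

        // After this patch the block check reduces to
        // id.getBlockId() == block.getBlockId(); throws InvalidToken otherwise.
        sm.checkAccess(id, null, block, BlockTokenSecretManager.AccessMode.READ);
      }
    }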

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=1077664&r1=1077663&r2=1077664&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Fri Mar  4 04:41:47 2011
@@ -17,74 +17,106 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import org.apache.commons.logging.*;
+import java.io.BufferedWriter;
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.lang.management.ManagementFactory;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Date;
+import java.util.EnumSet;
+import java.util.Formatter;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.Random;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.Map.Entry;
+import java.util.concurrent.TimeUnit;
+
+import javax.management.MBeanServer;
+import javax.management.NotCompliantMBeanException;
+import javax.management.ObjectName;
+import javax.management.StandardMBean;
 
-import org.apache.hadoop.conf.*;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
-import org.apache.hadoop.hdfs.protocol.*;
-import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
+import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
+import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
+import org.apache.hadoop.hdfs.protocol.ClientProtocol;
+import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.DirectoryListing;
+import org.apache.hadoop.hdfs.protocol.FSConstants;
+import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.protocol.UnregisteredDatanodeException;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager;
 import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
 import org.apache.hadoop.hdfs.server.common.GenerationStamp;
-import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
+import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.namenode.BlocksMap.BlockInfo;
-import org.apache.hadoop.hdfs.server.namenode.metrics.FSNamesystemMBean;
-import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.SecretManager.InvalidToken;
-import org.apache.hadoop.security.token.delegation.DelegationKey;
-import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
-import org.apache.hadoop.util.*;
-import org.apache.hadoop.metrics2.util.MBeans;
-import org.apache.hadoop.net.CachedDNSToSwitchMapping;
-import org.apache.hadoop.net.DNSToSwitchMapping;
-import org.apache.hadoop.net.NetworkTopology;
-import org.apache.hadoop.net.ScriptBasedMapping;
 import org.apache.hadoop.hdfs.server.namenode.LeaseManager.Lease;
 import org.apache.hadoop.hdfs.server.namenode.UnderReplicatedBlocks.BlockIterator;
+import org.apache.hadoop.hdfs.server.namenode.metrics.FSNamesystemMBean;
 import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations;
-import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeCommand;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.hdfs.server.protocol.DisallowedDatanodeException;
 import org.apache.hadoop.hdfs.server.protocol.KeyUpdateCommand;
 import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand;
-import org.apache.hadoop.fs.ContentSummary;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.*;
+import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.Server;
+import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.net.CachedDNSToSwitchMapping;
+import org.apache.hadoop.net.DNSToSwitchMapping;
+import org.apache.hadoop.net.NetworkTopology;
+import org.apache.hadoop.net.ScriptBasedMapping;
+import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.SecretManager.InvalidToken;
+import org.apache.hadoop.security.token.delegation.DelegationKey;
+import org.apache.hadoop.util.Daemon;
+import org.apache.hadoop.util.HostsFileReader;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.VersionInfo;
 import org.mortbay.util.ajax.JSON;
 
-import java.io.BufferedWriter;
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.DataOutputStream;
-import java.lang.management.ManagementFactory;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.util.*;
-import java.util.concurrent.TimeUnit;
-import java.util.Map.Entry;
-
-import javax.management.MBeanServer;
-import javax.management.NotCompliantMBeanException;
-import javax.management.ObjectName;
-import javax.management.StandardMBean;
-
 /***************************************************
  * FSNamesystem does the actual bookkeeping work for the
  * DataNode.
@@ -924,7 +956,11 @@ public class FSNamesystem implements FSC
       }
       LocatedBlock b = new LocatedBlock(blocks[curBlk], machineSet, curPos,
           blockCorrupt);
-
+      if(isAccessTokenEnabled) {
+        b.setBlockToken(accessTokenHandler.generateToken(b.getBlock(), 
+            EnumSet.of(BlockTokenSecretManager.AccessMode.READ)));
+      }
+      
       results.add(b); 
       curPos += blocks[curBlk].getNumBytes();
       curBlk++;
@@ -932,24 +968,6 @@ public class FSNamesystem implements FSC
           && curBlk < blocks.length 
           && results.size() < nrBlocksToReturn);
     
-    if(isAccessTokenEnabled) {
-      // Generate a list of the blockIds to be returned for this request
-      long [] blockIds = new long[results.size()];
-      for(int i = 0; i < results.size(); i++) {
-        blockIds[i] = results.get(i).getBlock().getBlockId();
-      }
-      
-      // Generate a single BlockTokenIdentifier for all ids
-      Token<BlockTokenIdentifier> bti 
-        = accessTokenHandler.generateToken(blockIds, 
-            EnumSet.of(BlockTokenSecretManager.AccessMode.READ));
-      
-      // Assign a reference to this BlockTokenIdentifier to all blocks
-      for(LocatedBlock lb : results) {
-        lb.setBlockToken(bti);
-      }
-    }
-    
     return inode.createLocatedBlocks(results);
   }
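
Seen from the client, each LocatedBlock returned by getBlockLocations now carries its
own token, so a reader takes the token off the specific block it is about to fetch. A
hedged fragment; the namenode proxy and the src path are hypothetical and do not
appear in the patch:

    static void listPerBlockTokens(ClientProtocol namenode, String src)
        throws IOException {
      LocatedBlocks lbs = namenode.getBlockLocations(src, 0, Long.MAX_VALUE);
      for (LocatedBlock lb : lbs.getLocatedBlocks()) {
        // Each block has its own READ token instead of one shared multi-block token.
        Token<BlockTokenIdentifier> t = lb.getBlockToken();
        System.out.println(lb.getBlock() + " -> " + t);
      }
    }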
 

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java?rev=1077664&r1=1077663&r2=1077664&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java Fri Mar  4 04:41:47 2011
@@ -41,12 +41,9 @@ import org.apache.hadoop.security.Kerber
     clientPrincipal = DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY)
 public interface DatanodeProtocol extends VersionedProtocol {
   /**
-   * 25: Serialized format of BlockTokenIdentifier changed to contain
-   *     multiple blocks within a single BlockTokenIdentifier
-   *     
-   *     (bumped to 25 to bring in line with trunk)
+   * 26: Remove getBlockLocations optimization
    */
-  public static final long versionID = 25L;
+  public static final long versionID = 26L;
   
   // error code
   final static int NOTIFY = 0;

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java?rev=1077664&r1=1077663&r2=1077664&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java Fri Mar  4 04:41:47 2011
@@ -18,11 +18,21 @@
 
 package org.apache.hadoop.hdfs.security.token.block;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.anyLong;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
 import java.net.InetSocketAddress;
-import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.Set;
 
@@ -30,8 +40,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
 import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.io.TestWritable;
@@ -46,18 +56,7 @@ import org.apache.hadoop.security.UserGr
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.log4j.Level;
-
 import org.junit.Test;
-
-import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
-import static org.junit.Assert.*;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyBoolean;
-import static org.mockito.Matchers.anyLong;
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.doAnswer;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
@@ -112,7 +111,7 @@ public class TestBlockToken {
         LOG.info("Got: " + id.toString());
         assertTrue("Received BlockTokenIdentifier is wrong", ident.equals(id));
         sm.checkAccess(id, null, block, BlockTokenSecretManager.AccessMode.WRITE);
-        result = new LocatedBlock(new Block(id.getBlockIds()[0]), null);
+        result = new LocatedBlock(new Block(id.getBlockId()), null);
       }
       return result;
     }
@@ -225,29 +224,4 @@ public class TestBlockToken {
       }
     }
   }
-
-  @Test
-  public void collectionOfBlocksActsSanely() {
-    final long[][] testBlockIds = new long [][] {{99l, 7l, -32l, 0l},
-                                                 {},
-                                                 {42l},
-                                                 {-5235l, 2352}};
-    final long [] notBlockIds = new long [] { 32l, 1l, -23423423l};
-    
-    for(long [] bids : testBlockIds) {
-      BlockTokenIdentifier bti = new BlockTokenIdentifier("Madame Butterfly", 
-          bids, EnumSet.noneOf(BlockTokenSecretManager.AccessMode.class));
-      
-      for(long bid : bids) assertTrue(bti.isBlockIncluded(bid));
-      
-      for(long nbid : notBlockIds) assertFalse(bti.isBlockIncluded(nbid));
-      
-      // BlockTokenIdentifiers maintain a sorted array of the block Ids.
-      long[] sorted = Arrays.copyOf(bids, bids.length);
-      Arrays.sort(sorted);
-      
-      assertTrue(Arrays.toString(bids)+" doesn't equal "+Arrays.toString(sorted), 
-          Arrays.equals(bti.getBlockIds(), sorted));
-    }
-  }
 }
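
The removed multi-block sanity test no longer applies. A hypothetical single-block
counterpart (not part of this commit) inside TestBlockToken would reduce to:

    // Hypothetical: the single-block identifier simply reports the id it was built with.
    @Test
    public void singleBlockIdIsRetained() {
      BlockTokenIdentifier bti = new BlockTokenIdentifier("Madame Butterfly", 42L,
          EnumSet.noneOf(BlockTokenSecretManager.AccessMode.class));
      assertEquals(42L, bti.getBlockId());
    }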


