hadoop-hdfs-commits mailing list archives

From sur...@apache.org
Subject svn commit: r1177130 [1/2] - in /hadoop/common/branches/HDFS-1623/hadoop-hdfs-project: ./ hadoop-hdfs/ hadoop-hdfs/src/main/java/ hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/ hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/ hadoop-hdfs/s...
Date Thu, 29 Sep 2011 00:43:03 GMT
Author: suresh
Date: Thu Sep 29 00:42:47 2011
New Revision: 1177130

URL: http://svn.apache.org/viewvc?rev=1177130&view=rev
Log:
Merging trunk to HDFS-1623 branch

Added:
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
      - copied unchanged from r1177128, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/RenewerParam.java
      - copied unchanged from r1177128, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/RenewerParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestProcessCorruptBlocks.java
      - copied unchanged from r1177128, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestProcessCorruptBlocks.java
Modified:
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DstPathParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/OverwriteParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/RenameOptionSetParam.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/native/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/test/hdfs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSPermission.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationToken.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestHost2NodesMap.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestMulitipleNNDataBlockScanner.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestReplicasMap.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java
    hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Thu Sep 29 00:42:47 2011
@@ -0,0 +1 @@
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Sep 29 00:42:47 2011
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs:1152502-1173011
+/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs:1152502-1177128
 /hadoop/core/branches/branch-0.19/hdfs:713112
 /hadoop/hdfs/branches/HDFS-1052:987665-1095512
 /hadoop/hdfs/branches/HDFS-265:796829-820463

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Thu Sep 29 00:42:47 2011
@@ -16,6 +16,9 @@ Trunk (unreleased changes)
     HDFS-2318. Provide authentication to webhdfs using SPNEGO and delegation
     tokens.  (szetszwo)
 
+    HDFS-2340. Support getFileBlockLocations and getDelegationToken in webhdfs.
+    (szetszwo)
+
   IMPROVEMENTS
 
     HADOOP-7524. Change RPC to allow multiple protocols including multiple versions of the same protocol. (Sanjay Radia)
@@ -35,6 +38,18 @@ Trunk (unreleased changes)
     not use ArrayWritable for writing non-array items.  (Uma Maheswara Rao G
     via szetszwo)
 
+    HDFS-2351. Change Namenode and Datanode to register each of their protocols
+    separately. (Sanjay Radia)
+
+    HDFS-2356.  Support case insensitive query parameter names in webhdfs.
+    (szetszwo)
+
+    HDFS-2368.  Move SPNEGO conf properties from hdfs-default.xml to
+    hdfs-site.xml.  (szetszwo)
+
+    HDFS-2355. Federation: enable using the same configuration file across 
+    all the nodes in the cluster. (suresh)
+
   BUG FIXES
     HDFS-2287. TestParallelRead has a small off-by-one bug. (todd)
 
@@ -57,6 +72,17 @@ Trunk (unreleased changes)
     IOExceptions of stream closures can mask root exceptions.  (Uma Maheswara
     Rao G via szetszwo)
 
+    HDFS-46.   Change default namespace quota of root directory from
+    Integer.MAX_VALUE to Long.MAX_VALUE.  (Uma Maheswara Rao G via szetszwo)
+
+    HDFS-2366. Initialize WebHdfsFileSystem.ugi in object construction.
+    (szetszwo)
+
+    HDFS-2373. Commands using webhdfs and hftp print unnecessary debug 
+    info on the console with security enabled. (Arpit Gupta via suresh)
+
+    HDFS-2361. hftp is broken; fixed username checks in JspHelper. (jitendra)
+
 Release 0.23.0 - Unreleased
 
   INCOMPATIBLE CHANGES
@@ -739,6 +765,12 @@ Release 0.23.0 - Unreleased
     HDFS-1217.  Change some NameNode methods from public to package private.
     (Laxman via szetszwo)
 
+    HDFS-2332. Add test for HADOOP-7629 (using an immutable FsPermission
+    object as an RPC parameter fails). (todd)
+
+    HDFS-2363. Move datanodes size printing from FSNamesystem.metasave(..)
+    to BlockManager.  (Uma Maheswara Rao G via szetszwo)
+
   OPTIMIZATIONS
 
     HDFS-1458. Improve checkpoint performance by avoiding unnecessary image
@@ -1607,7 +1639,11 @@ Release 0.22.0 - Unreleased
     HDFS-2232. Generalize regular expressions in TestHDFSCLI.
     (Plamen Jeliazkov via shv)
 
+    HDFS-2290. Block with corrupt replica is not getting replicated.
+    (Benoy Antony via shv)
+
 Release 0.21.1 - Unreleased
+
     HDFS-1466. TestFcHdfsSymlink relies on /tmp/test not existing. (eli)
 
     HDFS-874. TestHDFSFileContextMainOperations fails on weirdly 

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Sep 29 00:42:47 2011
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java:1159757-1173011
+/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java:1159757-1177128
 /hadoop/core/branches/branch-0.19/hdfs/src/java:713112
 /hadoop/core/branches/branch-0.19/hdfs/src/main/java:713112
 /hadoop/core/trunk/src/hdfs:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java Thu Sep 29 00:42:47 2011
@@ -38,6 +38,7 @@ import java.util.Random;
 import java.util.StringTokenizer;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
@@ -577,17 +578,6 @@ public class DFSUtil {
     }
   }
   
-  /**
-   * Returns the configured nameservice Id
-   * 
-   * @param conf
-   *          Configuration object to lookup the nameserviceId
-   * @return nameserviceId string from conf
-   */
-  public static String getNameServiceId(Configuration conf) {
-    return conf.get(DFS_FEDERATION_NAMESERVICE_ID);
-  }
-  
   /** Return used as percentage of capacity */
   public static float getPercentUsed(long used, long capacity) {
     return capacity <= 0 ? 100 : ((float)used * 100.0f)/(float)capacity; 
@@ -707,4 +697,77 @@ public class DFSUtil {
     // TODO:HA configuration changes pending
     return false;
   }
+  
+  /**
+   * Get name service Id for the {@link NameNode} based on namenode RPC address
+   * matching the local node address.
+   */
+  public static String getNamenodeNameServiceId(Configuration conf) {
+    return getNameServiceId(conf, DFS_NAMENODE_RPC_ADDRESS_KEY);
+  }
+  
+  /**
+   * Get name service Id for the BackupNode based on backup node RPC address
+   * matching the local node address.
+   */
+  public static String getBackupNameServiceId(Configuration conf) {
+    return getNameServiceId(conf, DFS_NAMENODE_BACKUP_ADDRESS_KEY);
+  }
+  
+  /**
+   * Get name service Id for the secondary node based on secondary http address
+   * matching the local node address.
+   */
+  public static String getSecondaryNameServiceId(Configuration conf) {
+    return getNameServiceId(conf, DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY);
+  }
+  
+  /**
+   * Get the nameservice Id by matching the {@code addressKey} with the
+   * address of the local node.
+   * 
+   * If {@link DFSConfigKeys#DFS_FEDERATION_NAMESERVICE_ID} is not specifically
+   * configured, this method determines the nameservice Id by matching the local
+   * nodes address with the configured addresses. When a match is found, it
+   * returns the nameservice Id from the corresponding configuration key.
+   * 
+   * @param conf Configuration
+   * @param addressKey configuration key to get the address.
+   * @return name service Id on success, null if federation is not configured.
+   * @throws HadoopIllegalArgumentException on error
+   */
+  private static String getNameServiceId(Configuration conf, String addressKey) {
+    String nameserviceId = conf.get(DFS_FEDERATION_NAMESERVICE_ID);
+    if (nameserviceId != null) {
+      return nameserviceId;
+    }
+    
+    Collection<String> ids = getNameServiceIds(conf);
+    if (ids == null || ids.size() == 0) {
+      // Not a federated configuration, hence no nameservice Id
+      return null;
+    }
+    
+    // Match each configured rpc address against the local node's address
+    int found = 0;
+    for (String id : ids) {
+      String addr = conf.get(getNameServiceIdKey(addressKey, id));
+      InetSocketAddress s = NetUtils.createSocketAddr(addr);
+      if (NetUtils.isLocalAddress(s.getAddress())) {
+        nameserviceId = id;
+        found++;
+      }
+    }
+    if (found > 1) { // At most one address may match the local address
+      throw new HadoopIllegalArgumentException(
+          "Configuration has multiple RPC addresses that match "
+              + "the local node's address. Please configure the system with "
+              + "the parameter " + DFS_FEDERATION_NAMESERVICE_ID);
+    }
+    if (found == 0) {
+      throw new HadoopIllegalArgumentException("Configuration address "
+          + addressKey + " does not match the local node's address for any "
+          + "configured nameservice Id");
+    }
+    return nameserviceId;
+  }
 }

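For reference, a minimal sketch of how the new resolution behaves in a federated setup; the nameservice IDs and host names below are hypothetical, and the per-nameservice key suffixing follows the getNameServiceIdKey lookup used above:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.DFSUtil;
    import org.apache.hadoop.hdfs.HdfsConfiguration;

    public class NameServiceIdSketch {
      public static void main(String[] args) {
        Configuration conf = new HdfsConfiguration();
        // Two federated nameservices (hypothetical IDs and hosts).
        conf.set(DFSConfigKeys.DFS_FEDERATION_NAMESERVICES, "ns1,ns2");
        conf.set(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY + ".ns1",
            "nn1.example.com:8020");
        conf.set(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY + ".ns2",
            "nn2.example.com:8020");
        // Resolves to "ns1" on the host whose address matches
        // nn1.example.com; more than one local match would throw
        // HadoopIllegalArgumentException.
        System.out.println(DFSUtil.getNamenodeNameServiceId(conf));
      }
    }
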
Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java Thu Sep 29 00:42:47 2011
@@ -115,6 +115,26 @@ public class DatanodeInfo extends Datano
     this.location = location;
     this.hostName = hostName;
   }
+
+  /** Constructor */
+  public DatanodeInfo(final String name, final String storageID,
+      final int infoPort, final int ipcPort,
+      final long capacity, final long dfsUsed, final long remaining,
+      final long blockPoolUsed, final long lastUpdate, final int xceiverCount,
+      final String networkLocation, final String hostName,
+      final AdminStates adminState) {
+    super(name, storageID, infoPort, ipcPort);
+
+    this.capacity = capacity;
+    this.dfsUsed = dfsUsed;
+    this.remaining = remaining;
+    this.blockPoolUsed = blockPoolUsed;
+    this.lastUpdate = lastUpdate;
+    this.xceiverCount = xceiverCount;
+    this.location = networkLocation;
+    this.hostName = hostName;
+    this.adminState = adminState;
+  }
   
   /** The raw capacity. */
   public long getCapacity() { return capacity; }

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java Thu Sep 29 00:42:47 2011
@@ -308,6 +308,11 @@ public class BlockManager {
   /** Dump meta data to out. */
   public void metaSave(PrintWriter out) {
     assert namesystem.hasWriteLock();
+    final List<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();
+    final List<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
+    datanodeManager.fetchDatanodes(live, dead, false);
+    out.println("Live Datanodes: " + live.size());
+    out.println("Dead Datanodes: " + dead.size());
     //
     // Dump contents of neededReplication
     //
@@ -842,7 +847,7 @@ public class BlockManager {
 
     // Add this replica to corruptReplicas Map
     corruptReplicas.addToCorruptReplicasMap(storedBlock, node);
-    if (countNodes(storedBlock).liveReplicas() > inode.getReplication()) {
+    if (countNodes(storedBlock).liveReplicas() >= inode.getReplication()) {
       // the block is over-replicated so invalidate the replicas immediately
       invalidateBlock(storedBlock, node);
     } else if (namesystem.isPopulatingReplQueues()) {
@@ -867,7 +872,7 @@ public class BlockManager {
     // Check how many copies we have of the block. If we have at least one
     // copy on a live node, then we can delete it.
     int count = countNodes(blk).liveReplicas();
-    if (count > 1) {
+    if (count >= 1) {
       addToInvalidates(blk, dn);
       removeStoredBlock(blk, node);
       if(NameNode.stateChangeLog.isDebugEnabled()) {

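The two relaxed comparisons above are the HDFS-2290 fix: with exactly as many live replicas as the replication factor, the old strict tests never fired, so a reported corrupt replica was neither invalidated nor re-replicated. A tiny sketch of the boundary case, with hypothetical counts:

    // Boundary case behind the HDFS-2290 fix (hypothetical counts).
    int liveReplicas = 3;  // healthy copies reported for the block
    int replication = 3;   // target replication factor of the file
    boolean oldTest = liveReplicas > replication;   // false: corrupt copy kept
    boolean newTest = liveReplicas >= replication;  // true: invalidated now
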
Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java Thu Sep 29 00:42:47 2011
@@ -54,11 +54,13 @@ import org.apache.hadoop.hdfs.security.t
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
+import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.http.HtmlQuoting;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.Token;
@@ -68,7 +70,7 @@ import org.apache.hadoop.util.VersionInf
 public class JspHelper {
   public static final String CURRENT_CONF = "current.conf";
   final static public String WEB_UGI_PROPERTY_NAME = DFSConfigKeys.DFS_WEB_UGI_KEY;
-  public static final String DELEGATION_PARAMETER_NAME = "delegation";
+  public static final String DELEGATION_PARAMETER_NAME = DelegationParam.NAME;
   public static final String NAMENODE_ADDRESS = "nnaddr";
   static final String SET_DELEGATION = "&" + DELEGATION_PARAMETER_NAME +
                                               "=";
@@ -551,7 +553,8 @@ public class JspHelper {
         DelegationTokenIdentifier id = new DelegationTokenIdentifier();
         id.readFields(in);
         ugi = id.getUser();
-        checkUsername(ugi.getUserName(), user);
+        checkUsername(ugi.getShortUserName(), usernameFromQuery);
+        checkUsername(ugi.getShortUserName(), user);
         ugi.addToken(token);
         ugi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
       } else {
@@ -560,13 +563,11 @@ public class JspHelper {
                                 "authenticated by filter");
         }
         ugi = UserGroupInformation.createRemoteUser(user);
+        checkUsername(ugi.getShortUserName(), usernameFromQuery);
         // This is not necessarily true, could have been auth'ed by user-facing
         // filter
         ugi.setAuthenticationMethod(secureAuthMethod);
       }
-
-      checkUsername(user, usernameFromQuery);
-
     } else { // Security's not on, pull from url
       ugi = usernameFromQuery == null?
           getDefaultWebUser(conf) // not specified in request
@@ -579,10 +580,18 @@ public class JspHelper {
     return ugi;
   }
 
+  /**
+   * Expected user name should be a short name.
+   */
   private static void checkUsername(final String expected, final String name
       ) throws IOException {
-    if (name != null && !name.equals(expected)) {
-      throw new IOException("Usernames not matched: name=" + name
+    if (name == null) {
+      return;
+    }
+    KerberosName u = new KerberosName(name);
+    String shortName = u.getShortName();
+    if (!shortName.equals(expected)) {
+      throw new IOException("Usernames not matched: name=" + shortName
           + " != expected=" + expected);
     }
   }

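checkUsername now canonicalizes the incoming name through KerberosName, so a full principal compares equal to its short form. A minimal sketch, assuming a hypothetical principal and that EXAMPLE.COM is the configured default realm (getShortName applies the auth_to_local rules and may throw IOException):

    import java.io.IOException;
    import org.apache.hadoop.security.authentication.util.KerberosName;

    public class ShortNameSketch {
      public static void main(String[] args) throws IOException {
        KerberosName kn = new KerberosName("web/host1.example.com@EXAMPLE.COM");
        // Prints "web" under the default rules; this short form is what the
        // new checkUsername compares against the expected short user name.
        System.out.println(kn.getShortName());
      }
    }
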
Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java Thu Sep 29 00:42:47 2011
@@ -425,7 +425,7 @@ public class DataNode extends Configured
   private List<ServicePlugin> plugins;
   
   // For InterDataNodeProtocol
-  public Server ipcServer;
+  public RPC.Server ipcServer;
 
   private SecureResources secureResources = null;
   private AbstractList<File> dataDirs;
@@ -575,11 +575,15 @@ public class DataNode extends Configured
   private void initIpcServer(Configuration conf) throws IOException {
     InetSocketAddress ipcAddr = NetUtils.createSocketAddr(
         conf.get("dfs.datanode.ipc.address"));
-    ipcServer = RPC.getServer(DataNode.class, this, ipcAddr.getHostName(),
+    
+    // Add all the RPC protocols that the Datanode implements
+    ipcServer = RPC.getServer(ClientDatanodeProtocol.class, this, ipcAddr.getHostName(),
                               ipcAddr.getPort(), 
                               conf.getInt(DFS_DATANODE_HANDLER_COUNT_KEY, 
                                           DFS_DATANODE_HANDLER_COUNT_DEFAULT), 
                               false, conf, blockPoolTokenSecretManager);
+    ipcServer.addProtocol(InterDatanodeProtocol.class, this);
+    
     // set service-level authorization security policy
     if (conf.getBoolean(
         CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {

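This is the HDFS-2351 pattern, applied again to the NameNode below: the first protocol goes to RPC.getServer and each additional protocol the same object implements is attached with addProtocol, rather than exposing the whole implementation class. A condensed sketch; 'impl', the bind address, port, and handler count are hypothetical, and no secret manager is wired in:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
    import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
    import org.apache.hadoop.ipc.RPC;

    class MultiProtocolSketch {
      // 'impl' must implement both protocol interfaces.
      static RPC.Server serve(Object impl, Configuration conf)
          throws IOException {
        RPC.Server server = RPC.getServer(ClientDatanodeProtocol.class, impl,
            "0.0.0.0", 50020, 3, false, conf, null);
        server.addProtocol(InterDatanodeProtocol.class, impl);
        server.start();
        return server;
      }
    }
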
Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java Thu Sep 29 00:42:47 2011
@@ -50,6 +50,7 @@ import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSClient.DFSDataInputStream;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.web.ParamFilter;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
 import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
@@ -66,8 +67,11 @@ import org.apache.hadoop.hdfs.web.resour
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 
+import com.sun.jersey.spi.container.ResourceFilters;
+
 /** Web-hdfs DataNode implementation. */
 @Path("")
+@ResourceFilters(ParamFilter.class)
 public class DatanodeWebHdfsMethods {
   public static final Log LOG = LogFactory.getLog(DatanodeWebHdfsMethods.class);
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java Thu Sep 29 00:42:47 2011
@@ -25,6 +25,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
@@ -372,4 +373,9 @@ public class BackupNode extends NameNode
       throw new UnsupportedActionException(msg);
     }
   }
+  
+  @Override
+  protected String getNameServiceId(Configuration conf) {
+    return DFSUtil.getBackupNameServiceId(conf);
+  }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java Thu Sep 29 00:42:47 2011
@@ -120,7 +120,7 @@ public class FSDirectory implements Clos
     this.cond = dirLock.writeLock().newCondition();
     rootDir = new INodeDirectoryWithQuota(INodeDirectory.ROOT_NAME,
         ns.createFsOwnerPermissions(new FsPermission((short)0755)),
-        Integer.MAX_VALUE, UNKNOWN_DISK_SPACE);
+        Long.MAX_VALUE, UNKNOWN_DISK_SPACE);
     this.fsImage = fsImage;
     int configuredLimit = conf.getInt(
         DFSConfigKeys.DFS_LIST_LIMIT, DFSConfigKeys.DFS_LIST_LIMIT_DEFAULT);

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Thu Sep 29 00:42:47 2011
@@ -564,11 +564,6 @@ public class FSNamesystem implements Nam
       out.println(totalInodes + " files and directories, " + totalBlocks
           + " blocks = " + (totalInodes + totalBlocks) + " total");
 
-      final List<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();
-      final List<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
-      blockManager.getDatanodeManager().fetchDatanodes(live, dead, false);
-      out.println("Live Datanodes: "+live.size());
-      out.println("Dead Datanodes: "+dead.size());
       blockManager.metaSave(out);
 
       out.flush();

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Thu Sep 29 00:42:47 2011
@@ -27,6 +27,7 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ha.HealthCheckFailedException;
@@ -380,7 +381,6 @@ public class NameNode {
    * @param conf the configuration
    */
   protected void initialize(Configuration conf) throws IOException {
-    initializeGenericKeys(conf);
     UserGroupInformation.setConfiguration(conf);
     loginAsNameNodeUser(conf);
 
@@ -513,10 +513,14 @@ public class NameNode {
     this.haEnabled = DFSUtil.isHAEnabled(conf);
     this.state = !haEnabled ? ACTIVE_STATE : STANDBY_STATE;
     try {
+      initializeGenericKeys(conf, getNameServiceId(conf));
       initialize(conf);
     } catch (IOException e) {
       this.stop();
       throw e;
+    } catch (HadoopIllegalArgumentException e) {
+      this.stop();
+      throw e;
     }
   }
 
@@ -821,16 +825,16 @@ public class NameNode {
    * @param conf
    *          Configuration object to lookup specific key and to set the value
    *          to the key passed. Note the conf object is modified
+   * @param nameserviceId name service Id
    * @see DFSUtil#setGenericConf(Configuration, String, String...)
    */
-  public static void initializeGenericKeys(Configuration conf) {
-    final String nameserviceId = DFSUtil.getNameServiceId(conf);
+  public static void initializeGenericKeys(Configuration conf, String
+      nameserviceId) {
     if ((nameserviceId == null) || nameserviceId.isEmpty()) {
       return;
     }
     
     DFSUtil.setGenericConf(conf, nameserviceId, NAMESERVICE_SPECIFIC_KEYS);
-    
     if (conf.get(DFS_NAMENODE_RPC_ADDRESS_KEY) != null) {
       URI defaultUri = URI.create(HdfsConstants.HDFS_URI_SCHEME + "://"
           + conf.get(DFS_NAMENODE_RPC_ADDRESS_KEY));
@@ -838,6 +842,14 @@ public class NameNode {
     }
   }
     
+  /** 
+   * Get the name service Id for the node
+   * @return name service Id or null if federation is not configured
+   */
+  protected String getNameServiceId(Configuration conf) {
+    return DFSUtil.getNamenodeNameServiceId(conf);
+  }
+  
   /**
    */
   public static void main(String argv[]) throws Exception {

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java Thu Sep 29 00:42:47 2011
@@ -66,6 +66,7 @@ import org.apache.hadoop.hdfs.server.com
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
 import org.apache.hadoop.hdfs.server.namenode.NameNode.OperationCategory;
 import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics;
+import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
 import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeCommand;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol;
@@ -145,10 +146,17 @@ class NameNodeRpcServer implements Namen
       serviceRpcServer = null;
       serviceRPCAddress = null;
     }
-    this.server = RPC.getServer(NamenodeProtocols.class, this,
+    // Add all the RPC protocols that the namenode implements
+    this.server = RPC.getServer(ClientProtocol.class, this,
                                 socAddr.getHostName(), socAddr.getPort(),
                                 handlerCount, false, conf, 
                                 namesystem.getDelegationTokenSecretManager());
+    this.server.addProtocol(DatanodeProtocol.class, this);
+    this.server.addProtocol(NamenodeProtocol.class, this);
+    this.server.addProtocol(RefreshAuthorizationPolicyProtocol.class, this);
+    this.server.addProtocol(RefreshUserMappingsProtocol.class, this);
+    this.server.addProtocol(GetUserMappingsProtocol.class, this);
+    
 
     // set service-level authorization security policy
     if (serviceAuthEnabled =
@@ -971,8 +979,11 @@ class NameNodeRpcServer implements Namen
   }
 
   private static String getClientMachine() {
-    String clientMachine = Server.getRemoteAddress();
-    if (clientMachine == null) {
+    String clientMachine = NamenodeWebHdfsMethods.getRemoteAddress();
+    if (clientMachine == null) { //not a web client
+      clientMachine = Server.getRemoteAddress();
+    }
+    if (clientMachine == null) { //not a RPC client
       clientMachine = "";
     }
     return clientMachine;

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java Thu Sep 29 00:42:47 2011
@@ -38,10 +38,12 @@ import org.apache.commons.cli.ParseExcep
 import org.apache.commons.cli.PosixParser;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
+
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.DFSUtil.ErrorSimulator;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -173,12 +175,17 @@ public class SecondaryNameNode implement
   public SecondaryNameNode(Configuration conf,
       CommandLineOpts commandLineOpts) throws IOException {
     try {
-      NameNode.initializeGenericKeys(conf);
+      NameNode.initializeGenericKeys(conf,
+          DFSUtil.getSecondaryNameServiceId(conf));
       initialize(conf, commandLineOpts);
     } catch(IOException e) {
       shutdown();
       LOG.fatal("Failed to start secondary namenode. ", e);
       throw e;
+    } catch(HadoopIllegalArgumentException e) {
+      shutdown();
+      LOG.fatal("Failed to start secondary namenode. ", e);
+      throw e;
     }
   }
   

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Thu Sep 29 00:42:47 2011
@@ -57,6 +57,7 @@ import org.apache.hadoop.hdfs.server.com
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.hdfs.web.ParamFilter;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
@@ -78,6 +79,7 @@ import org.apache.hadoop.hdfs.web.resour
 import org.apache.hadoop.hdfs.web.resources.PutOpParam;
 import org.apache.hadoop.hdfs.web.resources.RecursiveParam;
 import org.apache.hadoop.hdfs.web.resources.RenameOptionSetParam;
+import org.apache.hadoop.hdfs.web.resources.RenewerParam;
 import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
 import org.apache.hadoop.hdfs.web.resources.UriFsPathParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
@@ -89,10 +91,20 @@ import org.apache.hadoop.security.UserGr
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 
+import com.sun.jersey.spi.container.ResourceFilters;
+
 /** Web-hdfs NameNode implementation. */
 @Path("")
+@ResourceFilters(ParamFilter.class)
 public class NamenodeWebHdfsMethods {
-  private static final Log LOG = LogFactory.getLog(NamenodeWebHdfsMethods.class);
+  public static final Log LOG = LogFactory.getLog(NamenodeWebHdfsMethods.class);
+
+  private static final ThreadLocal<String> REMOTE_ADDRESS = new ThreadLocal<String>(); 
+
+  /** @return the remote client address. */
+  public static String getRemoteAddress() {
+    return REMOTE_ADDRESS.get();
+  }
 
   private @Context ServletContext context;
   private @Context HttpServletRequest request;
@@ -215,6 +227,8 @@ public class NamenodeWebHdfsMethods {
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
       public Response run() throws IOException, URISyntaxException {
+        REMOTE_ADDRESS.set(request.getRemoteAddr());
+        try {
 
     final String fullpath = path.getAbsolutePath();
     final NameNode namenode = (NameNode)context.getAttribute("name.node");
@@ -272,6 +286,10 @@ public class NamenodeWebHdfsMethods {
     default:
       throw new UnsupportedOperationException(op + " is not supported");
     }
+
+        } finally {
+          REMOTE_ADDRESS.set(null);
+        }
       }
     });
   }
@@ -301,6 +319,8 @@ public class NamenodeWebHdfsMethods {
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
       public Response run() throws IOException, URISyntaxException {
+        REMOTE_ADDRESS.set(request.getRemoteAddr());
+        try {
 
     final String fullpath = path.getAbsolutePath();
     final NameNode namenode = (NameNode)context.getAttribute("name.node");
@@ -315,6 +335,10 @@ public class NamenodeWebHdfsMethods {
     default:
       throw new UnsupportedOperationException(op + " is not supported");
     }
+
+        } finally {
+          REMOTE_ADDRESS.set(null);
+        }
       }
     });
   }
@@ -335,10 +359,12 @@ public class NamenodeWebHdfsMethods {
           final OffsetParam offset,
       @QueryParam(LengthParam.NAME) @DefaultValue(LengthParam.DEFAULT)
           final LengthParam length,
+      @QueryParam(RenewerParam.NAME) @DefaultValue(RenewerParam.DEFAULT)
+          final RenewerParam renewer,
       @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
           final BufferSizeParam bufferSize
       ) throws IOException, URISyntaxException, InterruptedException {
-    return get(ugi, delegation, ROOT, op, offset, length, bufferSize);
+    return get(ugi, delegation, ROOT, op, offset, length, renewer, bufferSize);
   }
 
   /** Handle HTTP GET request. */
@@ -356,19 +382,23 @@ public class NamenodeWebHdfsMethods {
           final OffsetParam offset,
       @QueryParam(LengthParam.NAME) @DefaultValue(LengthParam.DEFAULT)
           final LengthParam length,
+      @QueryParam(RenewerParam.NAME) @DefaultValue(RenewerParam.DEFAULT)
+          final RenewerParam renewer,
       @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
           final BufferSizeParam bufferSize
       ) throws IOException, URISyntaxException, InterruptedException {
 
     if (LOG.isTraceEnabled()) {
       LOG.trace(op + ": " + path + ", ugi=" + ugi
-          + Param.toSortedString(", ", offset, length, bufferSize));
+          + Param.toSortedString(", ", offset, length, renewer, bufferSize));
     }
 
 
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
       public Response run() throws IOException, URISyntaxException {
+        REMOTE_ADDRESS.set(request.getRemoteAddr());
+        try {
 
     final NameNode namenode = (NameNode)context.getAttribute("name.node");
     final String fullpath = path.getAbsolutePath();
@@ -381,6 +411,15 @@ public class NamenodeWebHdfsMethods {
           op.getValue(), offset.getValue(), offset, length, bufferSize);
       return Response.temporaryRedirect(uri).build();
     }
+    case GETFILEBLOCKLOCATIONS:
+    {
+      final long offsetValue = offset.getValue();
+      final Long lengthValue = length.getValue();
+      final LocatedBlocks locatedblocks = np.getBlockLocations(fullpath,
+          offsetValue, lengthValue != null? lengthValue: offsetValue + 1);
+      final String js = JsonUtil.toJsonString(locatedblocks);
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+    }
     case GETFILESTATUS:
     {
       final HdfsFileStatus status = np.getFileInfo(fullpath);
@@ -392,9 +431,20 @@ public class NamenodeWebHdfsMethods {
       final StreamingOutput streaming = getListingStream(np, fullpath);
       return Response.ok(streaming).type(MediaType.APPLICATION_JSON).build();
     }
+    case GETDELEGATIONTOKEN:
+    {
+      final Token<? extends TokenIdentifier> token = generateDelegationToken(
+          namenode, ugi, renewer.getValue());
+      final String js = JsonUtil.toJsonString(token);
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+    }
     default:
       throw new UnsupportedOperationException(op + " is not supported");
     }    
+
+        } finally {
+          REMOTE_ADDRESS.set(null);
+        }
       }
     });
   }
@@ -462,6 +512,9 @@ public class NamenodeWebHdfsMethods {
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
       public Response run() throws IOException {
+        REMOTE_ADDRESS.set(request.getRemoteAddr());
+        try {
+
         final NameNode namenode = (NameNode)context.getAttribute("name.node");
         final String fullpath = path.getAbsolutePath();
 
@@ -475,6 +528,10 @@ public class NamenodeWebHdfsMethods {
         default:
           throw new UnsupportedOperationException(op + " is not supported");
         }
+
+        } finally {
+          REMOTE_ADDRESS.set(null);
+        }
       }
     });
   }

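All the REMOTE_ADDRESS hunks above follow one request-scoped pattern: each JAX-RS handler stores the caller's address in a static ThreadLocal for the duration of the doAs body and clears it in a finally block, and getClientMachine in NameNodeRpcServer (earlier in this commit) reads it back before falling through to the RPC layer. A self-contained sketch of the pattern with hypothetical names:

    import java.util.concurrent.Callable;

    // Request-scoped context via a ThreadLocal (hypothetical helper).
    public class RequestContext {
      private static final ThreadLocal<String> REMOTE_ADDRESS =
          new ThreadLocal<String>();

      /** @return the caller's address, or null outside a request. */
      public static String getRemoteAddress() {
        return REMOTE_ADDRESS.get();
      }

      public static <T> T withRemoteAddress(String addr, Callable<T> body)
          throws Exception {
        REMOTE_ADDRESS.set(addr);
        try {
          return body.call();        // handler logic runs here
        } finally {
          REMOTE_ADDRESS.set(null);  // always clear: servlet threads are pooled
        }
      }
    }
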
Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java Thu Sep 29 00:42:47 2011
@@ -149,7 +149,9 @@ public class DelegationTokenFetcher {
                 DataInputStream in = new DataInputStream(
                     new ByteArrayInputStream(token.getIdentifier()));
                 id.readFields(in);
-                System.out.println("Token (" + id + ") for " + token.getService());
+                if(LOG.isDebugEnabled()) {
+                  LOG.debug("Token (" + id + ") for " + token.getService());
+                }
               }
               return null;
             }
@@ -160,22 +162,28 @@ public class DelegationTokenFetcher {
                 for (Token<?> token : readTokens(tokenFile, conf)) {
                   result = renewDelegationToken(webUrl,
                       (Token<DelegationTokenIdentifier>) token);
-                  System.out.println("Renewed token via " + webUrl + " for "
-                      + token.getService() + " until: " + new Date(result));
+                  if(LOG.isDebugEnabled()) {
+                    LOG.debug("Renewed token via " + webUrl + " for "
+                        + token.getService() + " until: " + new Date(result));
+                  }
                 }
               } else if (cancel) {
                 for (Token<?> token : readTokens(tokenFile, conf)) {
                   cancelDelegationToken(webUrl,
                       (Token<DelegationTokenIdentifier>) token);
-                  System.out.println("Cancelled token via " + webUrl + " for "
-                      + token.getService());
+                  if(LOG.isDebugEnabled()) {
+                    LOG.debug("Cancelled token via " + webUrl + " for "
+                        + token.getService());
+                  }
                 }
               } else {
                 Credentials creds = getDTfromRemote(webUrl, renewer);
                 creds.writeTokenStorageFile(tokenFile, conf);
                 for (Token<?> token : creds.getAllTokens()) {
-                  System.out.println("Fetched token via " + webUrl + " for "
-                      + token.getService() + " into " + tokenFile);
+                  if(LOG.isDebugEnabled()) {
+                    LOG.debug("Fetched token via " + webUrl + " for "
+                        + token.getService() + " into " + tokenFile);
+                  }
                 }
               }
             } else {
@@ -184,24 +192,30 @@ public class DelegationTokenFetcher {
                 for (Token<?> token : readTokens(tokenFile, conf)) {
                   ((DistributedFileSystem) fs)
                       .cancelDelegationToken((Token<DelegationTokenIdentifier>) token);
-                  System.out.println("Cancelled token for "
-                      + token.getService());
+                  if(LOG.isDebugEnabled()) {
+                    LOG.debug("Cancelled token for "
+                        + token.getService());
+                  }
                 }
               } else if (renew) {
                 long result;
                 for (Token<?> token : readTokens(tokenFile, conf)) {
                   result = ((DistributedFileSystem) fs)
                       .renewDelegationToken((Token<DelegationTokenIdentifier>) token);
-                  System.out.println("Renewed token for " + token.getService()
-                      + " until: " + new Date(result));
+                  if(LOG.isDebugEnabled()) {
+                    LOG.debug("Renewed token for " + token.getService()
+                        + " until: " + new Date(result));
+                  }
                 }
               } else {
                 Token<?> token = fs.getDelegationToken(renewer);
                 Credentials cred = new Credentials();
                 cred.addToken(token.getService(), token);
                 cred.writeTokenStorageFile(tokenFile, conf);
-                System.out.println("Fetched token for " + token.getService()
-                    + " into " + tokenFile);
+                if(LOG.isDebugEnabled()) {
+                  LOG.debug("Fetched token for " + token.getService()
+                      + " into " + tokenFile);
+                }
               }
             }
             return null;
@@ -221,6 +235,11 @@ public class DelegationTokenFetcher {
       } else {
         url.append(nnAddr).append(GetDelegationTokenServlet.PATH_SPEC);
       }
+      
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Retrieving token from: " + url);
+      }
+      
       URL remoteURL = new URL(url.toString());
       SecurityUtil.fetchServiceTicket(remoteURL);
       URLConnection connection = remoteURL.openConnection();

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java Thu Sep 29 00:42:47 2011
@@ -17,19 +17,31 @@
  */
 package org.apache.hadoop.hdfs.web;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
+import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
 import org.mortbay.util.ajax.JSON;
 
 /** JSON Utilities */
 public class JsonUtil {
-  private static final ThreadLocal<Map<String, Object>> jsonMap
-      = new ThreadLocal<Map<String, Object>>() {
+  private static class ThreadLocalMap extends ThreadLocal<Map<String, Object>> {
     @Override
     protected Map<String, Object> initialValue() {
       return new TreeMap<String, Object>();
@@ -41,7 +53,54 @@ public class JsonUtil {
       m.clear();
       return m;
     }
-  };
+  }
+
+  private static final ThreadLocalMap jsonMap = new ThreadLocalMap();
+  private static final ThreadLocalMap tokenMap = new ThreadLocalMap();
+  private static final ThreadLocalMap datanodeInfoMap = new ThreadLocalMap();
+  private static final ThreadLocalMap extendedBlockMap = new ThreadLocalMap();
+  private static final ThreadLocalMap locatedBlockMap = new ThreadLocalMap();
+
+  private static final DatanodeInfo[] EMPTY_DATANODE_INFO_ARRAY = {};
+
+  /** Convert a token object to a Json string. */
+  public static String toJsonString(final Token<? extends TokenIdentifier> token
+      ) throws IOException {
+    if (token == null) {
+      return null;
+    }
+
+    final Map<String, Object> m = tokenMap.get();
+    m.put("urlString", token.encodeToUrlString());
+    return JSON.toString(m);
+  }
+
+  /** Convert a Json map to a Token. */
+  public static Token<? extends TokenIdentifier> toToken(
+      final Map<?, ?> m) throws IOException {
+    if (m == null) {
+      return null;
+    }
+
+    final Token<DelegationTokenIdentifier> token
+        = new Token<DelegationTokenIdentifier>();
+    token.decodeFromUrlString((String)m.get("urlString"));
+    return token;
+  }
+
+  /** Convert a Json map to a Token of DelegationTokenIdentifier. */
+  @SuppressWarnings("unchecked")
+  public static Token<DelegationTokenIdentifier> toDelegationToken(
+      final Map<?, ?> m) throws IOException {
+    return (Token<DelegationTokenIdentifier>)toToken(m);
+  }
+
+  /** Convert a Json map to a Token of BlockTokenIdentifier. */
+  @SuppressWarnings("unchecked")
+  public static Token<BlockTokenIdentifier> toBlockToken(
+      final Map<?, ?> m) throws IOException {
+    return (Token<BlockTokenIdentifier>)toToken(m);
+  }
 
   /** Convert an exception object to a Json string. */
   public static String toJsonString(final Exception e) {
@@ -77,11 +136,10 @@ public class JsonUtil {
 
   /** Convert a HdfsFileStatus object to a Json string. */
   public static String toJsonString(final HdfsFileStatus status) {
-    final Map<String, Object> m = jsonMap.get();
     if (status == null) {
-      m.put("isNull", true);
+      return null;
     } else {
-      m.put("isNull", false);
+      final Map<String, Object> m = jsonMap.get();
       m.put("localName", status.getLocalName());
       m.put("isDir", status.isDir());
       m.put("isSymlink", status.isSymlink());
@@ -97,8 +155,8 @@ public class JsonUtil {
       m.put("modificationTime", status.getModificationTime());
       m.put("blockSize", status.getBlockSize());
       m.put("replication", status.getReplication());
+      return JSON.toString(m);
     }
-    return JSON.toString(m);
   }
 
   @SuppressWarnings("unchecked")
@@ -106,9 +164,9 @@ public class JsonUtil {
     return (Map<String, Object>) JSON.parse(jsonString);
   }
 
-  /** Convert a Json string to a HdfsFileStatus object. */
+  /** Convert a Json map to a HdfsFileStatus object. */
   public static HdfsFileStatus toFileStatus(final Map<String, Object> m) {
-    if ((Boolean)m.get("isNull")) {
+    if (m == null) {
       return null;
     }
 
@@ -130,4 +188,214 @@ public class JsonUtil {
         permission, owner, group,
         symlink, DFSUtil.string2Bytes(localName));
   }
+
+  /** Convert an ExtendedBlock to a Json string. */
+  public static String toJsonString(final ExtendedBlock extendedblock) {
+    if (extendedblock == null) {
+      return null;
+    }
+
+    final Map<String, Object> m = extendedBlockMap.get();
+    m.put("blockPoolId", extendedblock.getBlockPoolId());
+    m.put("blockId", extendedblock.getBlockId());
+    m.put("numBytes", extendedblock.getNumBytes());
+    m.put("generationStamp", extendedblock.getGenerationStamp());
+    return JSON.toString(m);
+  }
+
+  /** Convert a Json map to an ExtendedBlock object. */
+  public static ExtendedBlock toExtendedBlock(final Map<?, ?> m) {
+    if (m == null) {
+      return null;
+    }
+    
+    final String blockPoolId = (String)m.get("blockPoolId");
+    final long blockId = (Long)m.get("blockId");
+    final long numBytes = (Long)m.get("numBytes");
+    final long generationStamp = (Long)m.get("generationStamp");
+    return new ExtendedBlock(blockPoolId, blockId, numBytes, generationStamp);
+  }
+  
+  /** Convert a DatanodeInfo to a Json string. */
+  public static String toJsonString(final DatanodeInfo datanodeinfo) {
+    if (datanodeinfo == null) {
+      return null;
+    }
+
+    final Map<String, Object> m = datanodeInfoMap.get();
+    m.put("name", datanodeinfo.getName());
+    m.put("storageID", datanodeinfo.getStorageID());
+    m.put("infoPort", datanodeinfo.getInfoPort());
+
+    m.put("ipcPort", datanodeinfo.getIpcPort());
+
+    m.put("capacity", datanodeinfo.getCapacity());
+    m.put("dfsUsed", datanodeinfo.getDfsUsed());
+    m.put("remaining", datanodeinfo.getRemaining());
+    m.put("blockPoolUsed", datanodeinfo.getBlockPoolUsed());
+    m.put("lastUpdate", datanodeinfo.getLastUpdate());
+    m.put("xceiverCount", datanodeinfo.getXceiverCount());
+    m.put("networkLocation", datanodeinfo.getNetworkLocation());
+    m.put("hostName", datanodeinfo.getHostName());
+    m.put("adminState", datanodeinfo.getAdminState().name());
+    return JSON.toString(m);
+  }
+
+  /** Convert a Json map to a DatanodeInfo object. */
+  public static DatanodeInfo toDatanodeInfo(final Map<?, ?> m) {
+    if (m == null) {
+      return null;
+    }
+
+    return new DatanodeInfo(
+        (String)m.get("name"),
+        (String)m.get("storageID"),
+        (int)(long)(Long)m.get("infoPort"),
+        (int)(long)(Long)m.get("ipcPort"),
+
+        (Long)m.get("capacity"),
+        (Long)m.get("dfsUsed"),
+        (Long)m.get("remaining"),
+        (Long)m.get("blockPoolUsed"),
+        (Long)m.get("lastUpdate"),
+        (int)(long)(Long)m.get("xceiverCount"),
+        (String)m.get("networkLocation"),
+        (String)m.get("hostName"),
+        AdminStates.valueOf((String)m.get("adminState")));
+  }
+
+  /** Convert a DatanodeInfo[] to a Json string. */
+  public static String toJsonString(final DatanodeInfo[] array
+      ) throws IOException {
+    if (array == null) {
+      return null;
+    } else if (array.length == 0) {
+      return "[]";
+    } else {
+      final StringBuilder b = new StringBuilder().append('[').append(
+          toJsonString(array[0]));
+      for(int i = 1; i < array.length; i++) {
+        b.append(", ").append(toJsonString(array[i]));
+      }
+      return b.append(']').toString();
+    }
+  }
+
+  /** Convert an Object[] to a DatanodeInfo[]. */
+  public static DatanodeInfo[] toDatanodeInfoArray(final Object[] objects) {
+    if (objects == null) {
+      return null;
+    } else if (objects.length == 0) {
+      return EMPTY_DATANODE_INFO_ARRAY;
+    } else {
+      final DatanodeInfo[] array = new DatanodeInfo[objects.length];
+      for(int i = 0; i < array.length; i++) {
+        array[i] = (DatanodeInfo)toDatanodeInfo((Map<?, ?>) objects[i]);
+      }
+      return array;
+    }
+  }
+
+  /** Convert a LocatedBlock to a Json string. */
+  public static String toJsonString(final LocatedBlock locatedblock
+      ) throws IOException {
+    if (locatedblock == null) {
+      return null;
+    }
+ 
+    final Map<String, Object> m = locatedBlockMap.get();
+    m.put("blockToken", toJsonString(locatedblock.getBlockToken()));
+    m.put("isCorrupt", locatedblock.isCorrupt());
+    m.put("startOffset", locatedblock.getStartOffset());
+    m.put("block", toJsonString(locatedblock.getBlock()));
+
+    m.put("locations", toJsonString(locatedblock.getLocations()));
+    return JSON.toString(m);
+  }
+
+  /** Convert a Json map to LocatedBlock. */
+  public static LocatedBlock toLocatedBlock(final Map<?, ?> m) throws IOException {
+    if (m == null) {
+      return null;
+    }
+
+    final ExtendedBlock b = toExtendedBlock((Map<?, ?>)JSON.parse((String)m.get("block")));
+    final DatanodeInfo[] locations = toDatanodeInfoArray(
+        (Object[])JSON.parse((String)m.get("locations")));
+    final long startOffset = (Long)m.get("startOffset");
+    final boolean isCorrupt = (Boolean)m.get("isCorrupt");
+
+    final LocatedBlock locatedblock = new LocatedBlock(b, locations, startOffset, isCorrupt);
+    locatedblock.setBlockToken(toBlockToken((Map<?, ?>)JSON.parse((String)m.get("blockToken"))));
+    return locatedblock;
+  }
+
+  /** Convert a List of LocatedBlock to a Json string. */
+  public static String toJsonString(final List<LocatedBlock> array
+      ) throws IOException {
+    if (array == null) {
+      return null;
+    } else if (array.size() == 0) {
+      return "[]";
+    } else {
+      final StringBuilder b = new StringBuilder().append('[').append(
+          toJsonString(array.get(0)));
+      for(int i = 1; i < array.size(); i++) {
+        b.append(",\n  ").append(toJsonString(array.get(i)));
+      }
+      return b.append(']').toString();
+    }
+  }
+
+  /** Convert an Object[] to a List of LocatedBlock. 
+   * @throws IOException */
+  public static List<LocatedBlock> toLocatedBlockList(final Object[] objects
+      ) throws IOException {
+    if (objects == null) {
+      return null;
+    } else if (objects.length == 0) {
+      return Collections.emptyList();
+    } else {
+      final List<LocatedBlock> list = new ArrayList<LocatedBlock>(objects.length);
+      for(int i = 0; i < objects.length; i++) {
+        list.add((LocatedBlock)toLocatedBlock((Map<?, ?>)objects[i]));
+      }
+      return list;
+    }
+  }
+
+  /** Convert LocatedBlocks to a Json string. */
+  public static String toJsonString(final LocatedBlocks locatedblocks
+      ) throws IOException {
+    if (locatedblocks == null) {
+      return null;
+    }
+
+    final Map<String, Object> m = jsonMap.get();
+    m.put("fileLength", locatedblocks.getFileLength());
+    m.put("isUnderConstruction", locatedblocks.isUnderConstruction());
+
+    m.put("locatedBlocks", toJsonString(locatedblocks.getLocatedBlocks()));
+    m.put("lastLocatedBlock", toJsonString(locatedblocks.getLastLocatedBlock()));
+    m.put("isLastBlockComplete", locatedblocks.isLastBlockComplete());
+    return JSON.toString(m);
+  }
+
+  /** Convert a Json map to LocatedBlocks. */
+  public static LocatedBlocks toLocatedBlocks(final Map<String, Object> m
+      ) throws IOException {
+    if (m == null) {
+      return null;
+    }
+    
+    final long fileLength = (Long)m.get("fileLength");
+    final boolean isUnderConstruction = (Boolean)m.get("isUnderConstruction");
+    final List<LocatedBlock> locatedBlocks = toLocatedBlockList(
+        (Object[])JSON.parse((String) m.get("locatedBlocks")));
+    final LocatedBlock lastLocatedBlock = toLocatedBlock(
+        (Map<?, ?>)JSON.parse((String)m.get("lastLocatedBlock")));
+    final boolean isLastBlockComplete = (Boolean)m.get("isLastBlockComplete");
+    return new LocatedBlocks(fileLength, isUnderConstruction, locatedBlocks,
+        lastLocatedBlock, isLastBlockComplete);
+  }
 }
\ No newline at end of file
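
To make the new helpers concrete, a minimal round-trip sketch (illustrative,
not part of the patch): the standalone class and the sample field values are
assumptions, while the 13-argument DatanodeInfo constructor mirrors the call
in toDatanodeInfo above.

    import java.util.Map;

    import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
    import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
    import org.apache.hadoop.hdfs.web.JsonUtil;
    import org.mortbay.util.ajax.JSON;

    public class JsonUtilRoundTrip {
      public static void main(String[] args) {
        // Sample field values are made up for illustration.
        final DatanodeInfo info = new DatanodeInfo("127.0.0.1:50010",
            "storage-1", 50075, 50020, 1024L, 512L, 512L, 256L, 0L, 1,
            "/default-rack", "localhost", AdminStates.NORMAL);
        final String json = JsonUtil.toJsonString(info);
        // Parse the string back to a map and reconstruct the object.
        final DatanodeInfo copy =
            JsonUtil.toDatanodeInfo((Map<?, ?>)JSON.parse(json));
        System.out.println(json);
        System.out.println(copy.getName());
      }
    }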

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Thu Sep 29 00:42:47 2011
@@ -27,9 +27,12 @@ import java.net.HttpURLConnection;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.util.Arrays;
+import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
@@ -45,6 +48,7 @@ import org.apache.hadoop.hdfs.protocol.D
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
 import org.apache.hadoop.hdfs.protocol.UnresolvedPathException;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.namenode.SafeModeException;
 import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
@@ -54,7 +58,9 @@ import org.apache.hadoop.hdfs.web.resour
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.hdfs.web.resources.GroupParam;
 import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
+import org.apache.hadoop.hdfs.web.resources.LengthParam;
 import org.apache.hadoop.hdfs.web.resources.ModificationTimeParam;
+import org.apache.hadoop.hdfs.web.resources.OffsetParam;
 import org.apache.hadoop.hdfs.web.resources.OverwriteParam;
 import org.apache.hadoop.hdfs.web.resources.OwnerParam;
 import org.apache.hadoop.hdfs.web.resources.Param;
@@ -63,13 +69,16 @@ import org.apache.hadoop.hdfs.web.resour
 import org.apache.hadoop.hdfs.web.resources.PutOpParam;
 import org.apache.hadoop.hdfs.web.resources.RecursiveParam;
 import org.apache.hadoop.hdfs.web.resources.RenameOptionSetParam;
+import org.apache.hadoop.hdfs.web.resources.RenewerParam;
 import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;
 import org.mortbay.util.ajax.JSON;
 
@@ -82,17 +91,24 @@ public class WebHdfsFileSystem extends H
 
   private static final KerberosUgiAuthenticator AUTH = new KerberosUgiAuthenticator();
 
-  private UserGroupInformation ugi;
+  private final UserGroupInformation ugi;
   private final AuthenticatedURL.Token authToken = new AuthenticatedURL.Token();
   protected Path workingDir;
 
+  {
+    try {
+      ugi = UserGroupInformation.getCurrentUser();
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
   @Override
   public synchronized void initialize(URI uri, Configuration conf
       ) throws IOException {
     super.initialize(uri, conf);
     setConf(conf);
 
-    ugi = UserGroupInformation.getCurrentUser();
     this.workingDir = getHomeDirectory();
   }
 
@@ -163,11 +179,11 @@ public class WebHdfsFileSystem extends H
     }
   }
 
-  private URL toUrl(final HttpOpParam.Op op, final Path fspath,
+  URL toUrl(final HttpOpParam.Op op, final Path fspath,
       final Param<?,?>... parameters) throws IOException {
     //initialize URI path and query
     final String path = "/" + PATH_PREFIX
-        + makeQualified(fspath).toUri().getPath();
+        + (fspath == null? "/": makeQualified(fspath).toUri().getPath());
     final String query = op.toQueryString()
         + '&' + new UserParam(ugi)
         + Param.toSortedString("&", parameters);
@@ -396,4 +412,41 @@ public class WebHdfsFileSystem extends H
     }
     return statuses;
   }
+
+  @Override
+  public Token<DelegationTokenIdentifier> getDelegationToken(final String renewer
+      ) throws IOException {
+    final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKEN;
+    final Map<String, Object> m = run(op, null, new RenewerParam(renewer));
+    final Token<DelegationTokenIdentifier> token = JsonUtil.toDelegationToken(m); 
+    token.setService(new Text(getCanonicalServiceName()));
+    return token;
+  }
+
+  @Override
+  public List<Token<?>> getDelegationTokens(final String renewer
+      ) throws IOException {
+    final Token<?>[] t = {getDelegationToken(renewer)};
+    return Arrays.asList(t);
+  }
+
+  @Override
+  public BlockLocation[] getFileBlockLocations(final FileStatus status,
+      final long offset, final long length) throws IOException {
+    if (status == null) {
+      return null;
+    }
+    return getFileBlockLocations(status.getPath(), offset, length);
+  }
+
+  @Override
+  public BlockLocation[] getFileBlockLocations(final Path p, 
+      final long offset, final long length) throws IOException {
+    statistics.incrementReadOps(1);
+
+    final HttpOpParam.Op op = GetOpParam.Op.GETFILEBLOCKLOCATIONS;
+    final Map<String, Object> m = run(op, p, new OffsetParam(offset),
+        new LengthParam(length));
+    return DFSUtil.locatedBlocks2Locations(JsonUtil.toLocatedBlocks(m));
+  }
 }
\ No newline at end of file
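
A hedged usage sketch of the two calls added above; the filesystem URI,
renewer name, and path are placeholders, and wiring the webhdfs scheme to
WebHdfsFileSystem through the configuration is assumed:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.BlockLocation;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class WebHdfsTokenAndLocations {
      public static void main(String[] args) throws Exception {
        final Configuration conf = new Configuration();
        // "webhdfs://namenode:50070" is an assumed address for illustration.
        final FileSystem fs =
            FileSystem.get(URI.create("webhdfs://namenode:50070"), conf);

        // New in this patch: GETDELEGATIONTOKEN over HTTP, decoded by
        // JsonUtil.toDelegationToken and stamped with the service name.
        System.out.println(fs.getDelegationToken("hdfs"));

        // New in this patch: GETFILEBLOCKLOCATIONS, returned as
        // JSON-encoded LocatedBlocks and converted to BlockLocation[]
        // by DFSUtil.locatedBlocks2Locations.
        for (BlockLocation loc
            : fs.getFileBlockLocations(new Path("/user/foo/bar"), 0L, 1024L)) {
          System.out.println(loc);
        }
      }
    }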

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java Thu Sep 29 00:42:47 2011
@@ -20,7 +20,7 @@ package org.apache.hadoop.hdfs.web.resou
 /** Access time parameter. */
 public class AccessTimeParam extends LongParam {
   /** Parameter name. */
-  public static final String NAME = "accessTime";
+  public static final String NAME = "accesstime";
   /** Default parameter value. */
   public static final String DEFAULT = "-1";
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java Thu Sep 29 00:42:47 2011
@@ -25,7 +25,7 @@ import org.apache.hadoop.conf.Configurat
 /** Block size parameter. */
 public class BlockSizeParam extends LongParam {
   /** Parameter name. */
-  public static final String NAME = "blockSize";
+  public static final String NAME = "blocksize";
   /** Default parameter value. */
   public static final String DEFAULT = NULL;
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java Thu Sep 29 00:42:47 2011
@@ -23,7 +23,7 @@ import org.apache.hadoop.fs.CommonConfig
 /** Buffer size parameter. */
 public class BufferSizeParam extends IntegerParam {
   /** Parameter name. */
-  public static final String NAME = "bufferSize";
+  public static final String NAME = "buffersize";
   /** Default parameter value. */
   public static final String DEFAULT = NULL;
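
Taken together with the dstpath, modificationtime, and renameoptions hunks
below, these renames lowercase every multi-word query parameter, so clients
that hard-coded the old camelCase spellings must update their URLs. An
illustrative request (the host, path, and /webhdfs prefix are assumptions):

    PUT http://namenode:50070/webhdfs/user/foo/bar?op=CREATE&blocksize=67108864&buffersize=4096&overwrite=true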
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java Thu Sep 29 00:42:47 2011
@@ -17,13 +17,12 @@
  */
 package org.apache.hadoop.hdfs.web.resources;
 
-import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /** Delegation token parameter. */
 public class DelegationParam extends StringParam {
   /** Parameter name. */
-  public static final String NAME = JspHelper.DELEGATION_PARAMETER_NAME;
+  public static final String NAME = "delegation";
   /** Default parameter value. */
   public static final String DEFAULT = "";
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java Thu Sep 29 00:42:47 2011
@@ -21,9 +21,6 @@ import java.net.HttpURLConnection;
 
 /** Http DELETE operation parameter. */
 public class DeleteOpParam extends HttpOpParam<DeleteOpParam.Op> {
-  /** Parameter name. */
-  public static final String NAME = "deleteOp";
-
   /** Delete operations. */
   public static enum Op implements HttpOpParam.Op {
     DELETE(HttpURLConnection.HTTP_OK),

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DstPathParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DstPathParam.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DstPathParam.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DstPathParam.java Thu Sep 29 00:42:47 2011
@@ -22,7 +22,7 @@ import org.apache.hadoop.fs.Path;
 /** Destination path parameter. */
 public class DstPathParam extends StringParam {
   /** Parameter name. */
-  public static final String NAME = "dstPath";
+  public static final String NAME = "dstpath";
   /** Default parameter value. */
   public static final String DEFAULT = "";
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java Thu Sep 29 00:42:47 2011
@@ -21,16 +21,16 @@ import java.net.HttpURLConnection;
 
 /** Http GET operation parameter. */
 public class GetOpParam extends HttpOpParam<GetOpParam.Op> {
-  /** Parameter name. */
-  public static final String NAME = "getOp";
-
   /** Get operations. */
   public static enum Op implements HttpOpParam.Op {
     OPEN(HttpURLConnection.HTTP_OK),
+    GETFILEBLOCKLOCATIONS(HttpURLConnection.HTTP_OK),
 
     GETFILESTATUS(HttpURLConnection.HTTP_OK),
     LISTSTATUS(HttpURLConnection.HTTP_OK),
 
+    GETDELEGATIONTOKEN(HttpURLConnection.HTTP_OK),
+
     NULL(HttpURLConnection.HTTP_NOT_IMPLEMENTED);
 
     final int expectedHttpResponseCode;

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java Thu Sep 29 00:42:47 2011
@@ -20,6 +20,9 @@ package org.apache.hadoop.hdfs.web.resou
 /** Http operation parameter. */
 public abstract class HttpOpParam<E extends Enum<E> & HttpOpParam.Op>
     extends EnumParam<E> {
+  /** Parameter name. */
+  public static final String NAME = "op";
+
   /** Default parameter value. */
   public static final String DEFAULT = NULL;
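
With the per-verb parameter names (deleteOp, getOp, postOp, putOp) removed
in the surrounding hunks, every operation is now selected by this single
"op" parameter. Illustrative requests for the two GET operations added
above (host, path, and the exact offset/length/renewer spellings are
assumed to follow the new all-lowercase convention):

    GET http://namenode:50070/webhdfs/?op=GETDELEGATIONTOKEN&renewer=hdfs
    GET http://namenode:50070/webhdfs/user/foo/bar?op=GETFILEBLOCKLOCATIONS&offset=0&length=1024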
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java Thu Sep 29 00:42:47 2011
@@ -20,7 +20,7 @@ package org.apache.hadoop.hdfs.web.resou
 /** Modification time parameter. */
 public class ModificationTimeParam extends LongParam {
   /** Parameter name. */
-  public static final String NAME = "modificationTime";
+  public static final String NAME = "modificationtime";
   /** Default parameter value. */
   public static final String DEFAULT = "-1";
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/OverwriteParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/OverwriteParam.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/OverwriteParam.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/OverwriteParam.java Thu Sep 29 00:42:47 2011
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.web.resources;
 
-/** Recursive parameter. */
+/** Overwrite parameter. */
 public class OverwriteParam extends BooleanParam {
   /** Parameter name. */
   public static final String NAME = "overwrite";

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java Thu Sep 29 00:42:47 2011
@@ -21,9 +21,6 @@ import java.net.HttpURLConnection;
 
 /** Http POST operation parameter. */
 public class PostOpParam extends HttpOpParam<PostOpParam.Op> {
-  /** Parameter name. */
-  public static final String NAME = "postOp";
-
   /** Post operations. */
   public static enum Op implements HttpOpParam.Op {
     APPEND(HttpURLConnection.HTTP_OK),

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java Thu Sep 29 00:42:47 2011
@@ -21,9 +21,6 @@ import java.net.HttpURLConnection;
 
 /** Http PUT operation parameter. */
 public class PutOpParam extends HttpOpParam<PutOpParam.Op> {
-  /** Parameter name. */
-  public static final String NAME = "putOp";
-
   /** Put operations. */
   public static enum Op implements HttpOpParam.Op {
     CREATE(true, HttpURLConnection.HTTP_CREATED),

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/RenameOptionSetParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/RenameOptionSetParam.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/RenameOptionSetParam.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/RenameOptionSetParam.java Thu Sep 29 00:42:47 2011
@@ -22,7 +22,7 @@ import org.apache.hadoop.fs.Options;
 /** Rename option set parameter. */
 public class RenameOptionSetParam extends EnumSetParam<Options.Rename> {
   /** Parameter name. */
-  public static final String NAME = "renameOptions";
+  public static final String NAME = "renameoptions";
   /** Default parameter value. */
   public static final String DEFAULT = "";
 

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/native/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Sep 29 00:42:47 2011
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/native:1152502-1173011
+/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/native:1152502-1177128
 /hadoop/core/branches/branch-0.19/hdfs/src/main/native:713112
 /hadoop/core/branches/branch-0.19/mapred/src/c++/libhdfs:713112
 /hadoop/core/trunk/src/c++/libhdfs:776175-784663

Modified: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml Thu Sep 29 00:42:47 2011
@@ -683,24 +683,4 @@ creations/deletions), or "all".</descrip
   </description>
 </property>
 
-<property>
-  <name>dfs.web.authentication.kerberos.principal</name>
-  <value>HTTP/${dfs.web.hostname}@${kerberos.realm}</value>
-  <description>
-    The HTTP Kerberos principal used by Hadoop-Auth in the HTTP endpoint.
-
-    The HTTP Kerberos principal MUST start with 'HTTP/' per Kerberos
-    HTTP SPENGO specification.
-  </description>
-</property>
-
-<property>
-  <name>dfs.web.authentication.kerberos.keytab</name>
-  <value>${user.home}/dfs.web.keytab</value>
-  <description>
-    The Kerberos keytab file with the credentials for the
-    HTTP Kerberos principal used by Hadoop-Auth in the HTTP endpoint.
-  </description>
-</property>
-
 </configuration>

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Sep 29 00:42:47 2011
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode:1159757-1173011
+/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode:1159757-1177128
 /hadoop/core/branches/branch-0.19/hdfs/src/main/webapps/datanode:713112
 /hadoop/core/branches/branch-0.19/hdfs/src/webapps/datanode:713112
 /hadoop/core/trunk/src/webapps/datanode:776175-784663


