hadoop-hdfs-commits mailing list archives

From: t...@apache.org
Subject: svn commit: r1124466 [2/3] - in /hadoop/hdfs/branches/HDFS-1073: ./ bin/ conf/ src/c++/libhdfs/ src/contrib/ src/contrib/hdfsproxy/ src/contrib/thriftfs/ src/java/ src/java/org/apache/hadoop/hdfs/ src/java/org/apache/hadoop/hdfs/protocol/ src/java/org/...
Date: Wed, 18 May 2011 23:44:25 GMT
Modified: hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=1124466&r1=1124465&r2=1124466&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Wed May 18 23:44:23 2011
@@ -35,7 +35,6 @@ import org.apache.hadoop.hdfs.server.com
 import org.apache.hadoop.hdfs.server.common.Util;
 import static org.apache.hadoop.hdfs.server.common.Util.now;
 import org.apache.hadoop.hdfs.server.namenode.metrics.FSNamesystemMBean;
-import org.apache.hadoop.hdfs.server.namenode.metrics.FSNamesystemMetrics;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
@@ -45,7 +44,6 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
 import org.apache.hadoop.util.*;
-import org.apache.hadoop.metrics.util.MBeanUtil;
 import org.apache.hadoop.net.CachedDNSToSwitchMapping;
 import org.apache.hadoop.net.DNSToSwitchMapping;
 import org.apache.hadoop.net.NetworkTopology;
@@ -86,6 +84,11 @@ import org.apache.hadoop.fs.permission.*
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MutableCounterInt;
+import org.apache.hadoop.metrics2.util.MBeans;
 import org.mortbay.util.ajax.JSON;
 
 import java.io.BufferedWriter;
@@ -104,11 +107,9 @@ import java.util.*;
 import java.util.concurrent.TimeUnit;
 import java.util.Map.Entry;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
-
 import javax.management.NotCompliantMBeanException;
 import javax.management.ObjectName;
 import javax.management.StandardMBean;
-import javax.management.MBeanServer;
 
 /***************************************************
  * FSNamesystem does the actual bookkeeping work for the
@@ -123,8 +124,9 @@ import javax.management.MBeanServer;
  * 5)  LRU cache of updated-heartbeat machines
  ***************************************************/
 @InterfaceAudience.Private
-public class FSNamesystem implements FSConstants, FSNamesystemMBean, FSClusterStats,
-    NameNodeMXBean {
+@Metrics(context="dfs")
+public class FSNamesystem implements FSConstants, FSNamesystemMBean,
+    FSClusterStats, NameNodeMXBean {
   public static final Log LOG = LogFactory.getLog(FSNamesystem.class);
 
   private static final ThreadLocal<StringBuilder> auditBuffer =
@@ -178,7 +180,7 @@ public class FSNamesystem implements FSC
   private String supergroup;
   private PermissionStatus defaultPermission;
   // FSNamesystemMetrics counter variables
-  private FSNamesystemMetrics myFSMetrics;
+  @Metric private MutableCounterInt expiredHeartbeats;
   private long capacityTotal = 0L, capacityUsed = 0L, capacityRemaining = 0L;
   private long blockPoolUsed = 0L;
   private int totalLoad = 0;
@@ -326,14 +328,14 @@ public class FSNamesystem implements FSC
     this.fsLock = new ReentrantReadWriteLock(true); // fair locking
     setConfigurationParameters(conf);
     dtSecretManager = createDelegationTokenSecretManager(conf);
-    this.registerMBean(conf); // register the MBean for the FSNamesystemStutus
+    this.registerMBean(); // register the MBean for the FSNamesystemState
     if(fsImage == null) {
       this.dir = new FSDirectory(this, conf);
       StartupOption startOpt = NameNode.getStartupOption(conf);
       this.dir.loadFSImage(startOpt);
       long timeTakenToLoadFSImage = now() - systemStart;
       LOG.info("Finished loading FSImage in " + timeTakenToLoadFSImage + " msecs");
-      NameNode.getNameNodeMetrics().fsImageLoadTime.set(
+      NameNode.getNameNodeMetrics().setFsImageLoadTime(
                                 (int) timeTakenToLoadFSImage);
     } else {
       this.dir = new FSDirectory(fsImage, this, conf);
@@ -391,6 +393,7 @@ public class FSNamesystem implements FSC
       dnsToSwitchMapping.resolve(new ArrayList<String>(hostsReader.getHosts()));
     }
     registerMXBean();
+    DefaultMetricsSystem.instance().register(this);
   }
 
   public static Collection<URI> getNamespaceDirs(Configuration conf) {
@@ -1640,7 +1643,10 @@ public class FSNamesystem implements FSC
     if (targets.length < blockManager.minReplication) {
       throw new IOException("File " + src + " could only be replicated to " +
                             targets.length + " nodes, instead of " +
-                            blockManager.minReplication);
+                            blockManager.minReplication + ". There are "
+                            + clusterMap.getNumOfLeaves() + " datanode(s) running"
+                            + " but " + excludedNodes.size()
+                            + " node(s) are excluded in this operation.");
     }
 
     // Allocate a new block and record it in the INode. 
@@ -3186,7 +3192,7 @@ public class FSNamesystem implements FSC
              it.hasNext();) {
           DatanodeDescriptor nodeInfo = it.next();
           if (isDatanodeDead(nodeInfo)) {
-            myFSMetrics.numExpiredHeartbeats.inc();
+            expiredHeartbeats.incr();
             foundDead = true;
             nodeID = nodeInfo;
             break;
@@ -3253,7 +3259,7 @@ public class FSNamesystem implements FSC
     }
 
     // Log the block report processing stats from Namenode perspective
-    NameNode.getNameNodeMetrics().blockReport.inc((int) (endTime - startTime));
+    NameNode.getNameNodeMetrics().addBlockReport((int) (endTime - startTime));
     NameNode.stateChangeLog.info("BLOCK* NameSystem.processReport: from "
         + nodeID.getName() + ", blocks: " + newReport.getNumberOfBlocks()
         + ", processing time: " + (endTime - startTime) + " msecs");
@@ -3397,6 +3403,7 @@ public class FSNamesystem implements FSC
     }
   }
 
+  @Metric({"MissingBlocks", "Number of missing blocks"})
   public long getMissingBlocksCount() {
     // not locking
     return blockManager.getMissingBlocksCount();
@@ -3423,6 +3430,11 @@ public class FSNamesystem implements FSC
     }
   }
 
+  @Metric
+  public float getCapacityTotalGB() {
+    return DFSUtil.roundBytesToGB(getCapacityTotal());
+  }
+
   /**
    * Total used space by data nodes
    */
@@ -3432,6 +3444,12 @@ public class FSNamesystem implements FSC
       return capacityUsed;
     }
   }
+
+  @Metric
+  public float getCapacityUsedGB() {
+    return DFSUtil.roundBytesToGB(getCapacityUsed());
+  }
+
   /**
    * Total used space by data nodes as percentage of total capacity
    */
@@ -3460,6 +3478,11 @@ public class FSNamesystem implements FSC
     }
   }
 
+  @Metric
+  public float getCapacityRemainingGB() {
+    return DFSUtil.roundBytesToGB(getCapacityRemaining());
+  }
+
   /**
    * Total remaining space by data nodes as percentage of total capacity
    */
@@ -3472,6 +3495,7 @@ public class FSNamesystem implements FSC
    * Total number of connections.
    */
   @Override // FSNamesystemMBean
+  @Metric
   public int getTotalLoad() {
     synchronized (heartbeats) {
       return this.totalLoad;
@@ -3944,7 +3968,7 @@ public class FSNamesystem implements FSC
      * @param conf configuration
      */
     SafeModeInfo(Configuration conf) {
-      this.threshold = conf.getFloat(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, 0.95f);
+      this.threshold = conf.getFloat(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_DEFAULT);
       this.datanodeThreshold = conf.getInt(
         DFSConfigKeys.DFS_NAMENODE_SAFEMODE_MIN_DATANODES_KEY,
         DFSConfigKeys.DFS_NAMENODE_SAFEMODE_MIN_DATANODES_DEFAULT);
@@ -4039,7 +4063,7 @@ public class FSNamesystem implements FSC
       long timeInSafemode = now() - systemStart;
       NameNode.stateChangeLog.info("STATE* Leaving safe mode after " 
                                     + timeInSafemode/1000 + " secs.");
-      NameNode.getNameNodeMetrics().safeModeTime.set((int) timeInSafemode);
+      NameNode.getNameNodeMetrics().setSafeModeTime((int) timeInSafemode);
       
       if (reached >= 0) {
         NameNode.stateChangeLog.info("STATE* Safe mode is OFF."); 
@@ -4409,6 +4433,7 @@ public class FSNamesystem implements FSC
    * Get the total number of blocks in the system. 
    */
   @Override // FSNamesystemMBean
+  @Metric
   public long getBlocksTotal() {
     return blockManager.getTotalBlocks();
   }
@@ -4687,16 +4712,19 @@ public class FSNamesystem implements FSC
   }
 
   @Override // FSNamesystemMBean
+  @Metric
   public long getFilesTotal() {
     return this.dir.totalInodes();
   }
 
   @Override // FSNamesystemMBean
+  @Metric
   public long getPendingReplicationBlocks() {
     return blockManager.pendingReplicationBlocksCount;
   }
 
   @Override // FSNamesystemMBean
+  @Metric
   public long getUnderReplicatedBlocks() {
     return blockManager.underReplicatedBlocksCount;
   }
@@ -4707,23 +4735,28 @@ public class FSNamesystem implements FSC
   }
 
   /** Returns number of blocks with corrupt replicas */
+  @Metric({"CorruptBlocks", "Number of blocks with corrupt replicas"})
   public long getCorruptReplicaBlocks() {
     return blockManager.corruptReplicaBlocksCount;
   }
 
   @Override // FSNamesystemMBean
+  @Metric
   public long getScheduledReplicationBlocks() {
     return blockManager.scheduledReplicationBlocksCount;
   }
 
+  @Metric
   public long getPendingDeletionBlocks() {
     return blockManager.pendingDeletionBlocksCount;
   }
 
+  @Metric
   public long getExcessBlocks() {
     return blockManager.excessBlocksCount;
   }
   
+  @Metric
   public int getBlockCapacity() {
     return blockManager.getCapacity();
   }
@@ -4738,28 +4771,16 @@ public class FSNamesystem implements FSC
    * Register the FSNamesystem MBean using the name
    *        "hadoop:service=NameNode,name=FSNamesystemState"
    */
-  void registerMBean(Configuration conf) {
-    // We wrap to bypass standard mbean naming convention.
-    // This wraping can be removed in java 6 as it is more flexible in 
-    // package naming for mbeans and their impl.
-    StandardMBean bean;
-    try {
-      myFSMetrics = new FSNamesystemMetrics(this, conf);
-      bean = new StandardMBean(this,FSNamesystemMBean.class);
-      mbeanName = MBeanUtil.registerMBean("NameNode", "FSNamesystemState", bean);
+  void registerMBean() {
+    // We can only implement one MXBean interface, so we keep the old one.
+    try {
+      StandardMBean bean = new StandardMBean(this, FSNamesystemMBean.class);
+      mbeanName = MBeans.register("NameNode", "FSNamesystemState", bean);
     } catch (NotCompliantMBeanException e) {
-      LOG.warn("Exception in initializing StandardMBean as FSNamesystemMBean",
-	  e);
+      throw new RuntimeException("Bad MBean setup", e);
     }
 
-    LOG.info("Registered FSNamesystemStatusMBean");
-  }
-
-  /**
-   * get FSNamesystemMetrics
-   */
-  public FSNamesystemMetrics getFSNamesystemMetrics() {
-    return myFSMetrics;
+    LOG.info("Registered FSNamesystemState MBean");
   }
 
   /**
@@ -4767,7 +4788,7 @@ public class FSNamesystem implements FSC
    */
   public void shutdown() {
     if (mbeanName != null)
-      MBeanUtil.unregisterMBean(mbeanName);
+      MBeans.unregister(mbeanName);
   }
   
 
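The two hunks above complete FSNamesystem's move from MBeanUtil to the metrics2 MBeans helper: registration now fails fast instead of logging and limping on, and unregistration goes through MBeans.unregister(). A minimal sketch of that lifecycle, using only the MBeans calls visible in this diff; the ExampleState names are invented for illustration (FSNamesystem plays the same role with FSNamesystemMBean):

    import javax.management.NotCompliantMBeanException;
    import javax.management.ObjectName;
    import javax.management.StandardMBean;
    import org.apache.hadoop.metrics2.util.MBeans;

    // Hypothetical MBean interface/implementation pair for this sketch.
    interface ExampleStateMBean {
      long getBlocksTotal();
    }

    class ExampleState implements ExampleStateMBean {
      private ObjectName mbeanName;

      public long getBlocksTotal() { return 0; } // placeholder value

      void register() {
        try {
          // StandardMBean lets the implementation class name differ from
          // the <Interface>MBean convention plain JMX would require.
          StandardMBean bean = new StandardMBean(this, ExampleStateMBean.class);
          // Registers under "Hadoop:service=NameNode,name=ExampleState"
          // (capitalized domain, per the JMXGet hunks later in this commit).
          mbeanName = MBeans.register("NameNode", "ExampleState", bean);
        } catch (NotCompliantMBeanException e) {
          throw new RuntimeException("Bad MBean setup", e);
        }
      }

      void shutdown() {
        if (mbeanName != null) {
          MBeans.unregister(mbeanName);
        }
      }
    }
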
@@ -5399,12 +5420,11 @@ public class FSNamesystem implements FSC
   }
   
   /**
-   * If the remote IP for namenode method invokation is null, then the
-   * invocation is internal to the namenode. Client invoked methods are invoked
-   * over RPC and always have address != null.
+   * Client-invoked methods arrive over RPC and remain in an RPC call
+   * context even if the client exits.
    */
   private boolean isExternalInvocation() {
-    return Server.getRemoteIp() != null;
+    return Server.isRpcInvocation();
   }
   
   /**
@@ -5421,17 +5441,7 @@ public class FSNamesystem implements FSC
    * Register NameNodeMXBean
    */
   private void registerMXBean() {
-    // register MXBean
-    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
-    try {
-      ObjectName mxbeanName = new ObjectName("HadoopInfo:type=NameNodeInfo");
-      mbs.registerMBean(this, mxbeanName);
-    } catch ( javax.management.InstanceAlreadyExistsException iaee ) {
-      // in unit tests, we may run and restart the NN within the same JVM
-      LOG.info("NameNode MXBean already registered");
-    } catch ( javax.management.JMException e ) {
-      LOG.warn("Failed to register NameNodeMXBean", e);
-    }
+    MBeans.register("NameNode", "NameNodeInfo", this);
   }
 
   /**
@@ -5504,6 +5514,7 @@ public class FSNamesystem implements FSC
   }
 
   @Override // NameNodeMXBean
+  @Metric
   public long getTotalFiles() {
     return getFilesTotal();
   }
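
Taken together, the FSNamesystem hunks in this file swap the hand-rolled FSNamesystemMetrics updater for metrics2's annotation-driven sources: the class is tagged @Metrics(context="dfs"), counters become @Metric fields, read-only statistics become @Metric getters, and a single register() call wires everything up. A condensed sketch of the pattern, using invented names and assuming the annotation behavior these hunks rely on (a null @Metric field is instantiated by the metrics system at registration):

    import org.apache.hadoop.metrics2.annotation.Metric;
    import org.apache.hadoop.metrics2.annotation.Metrics;
    import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
    import org.apache.hadoop.metrics2.lib.MutableCounterInt;

    @Metrics(context="dfs")
    class ExampleNamesystemSource {
      // Field metric: created and snapshotted by the metrics system.
      @Metric private MutableCounterInt expiredHeartbeats;

      // Method metric: polled as a gauge; the two-element form supplies an
      // explicit name and description, as on getMissingBlocksCount() above.
      @Metric({"MissingBlocks", "Number of missing blocks"})
      public long getMissingBlocksCount() {
        return 0; // placeholder value for the sketch
      }

      void onDeadDatanode() {
        expiredHeartbeats.incr(); // replaces myFSMetrics.numExpiredHeartbeats.inc()
      }

      static ExampleNamesystemSource createAndRegister() {
        // Same call the constructor hunk adds to FSNamesystem.
        return DefaultMetricsSystem.instance().register(
            new ExampleNamesystemSource());
      }
    }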

Modified: hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1124466&r1=1124465&r2=1124466&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Wed May 18 23:44:23 2011
@@ -87,6 +87,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.ProtocolSignature;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.Server;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.NetworkTopology;
 import org.apache.hadoop.net.Node;
@@ -101,6 +102,7 @@ import org.apache.hadoop.security.author
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.tools.GetUserMappingsProtocol;
 import org.apache.hadoop.util.ServicePlugin;
 import org.apache.hadoop.util.StringUtils;
 
@@ -145,6 +147,34 @@ public class NameNode implements Namenod
     Configuration.addDefaultResource("hdfs-site.xml");
   }
   
+  /**
+   * HDFS federation configuration can have two types of parameters:
+   * <ol>
+   * <li>Parameters that are common to all the name services in the cluster.</li>
+   * <li>Parameters that are specific to a name service. These keys are suffixed
+   * with nameserviceId in the configuration. For example,
+   * "dfs.namenode.rpc-address.nameservice1".</li>
+   * </ol>
+   * 
+   * The following are the nameservice-specific keys.
+   */
+  public static final String[] NAMESERVICE_SPECIFIC_KEYS = {
+    DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY,
+    DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
+    DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
+    DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_DIR_KEY,
+    DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY,
+    DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
+    DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
+    DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
+    DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
+    DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
+    DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY,
+    DFSConfigKeys.DFS_NAMENODE_BACKUP_ADDRESS_KEY,
+    DFSConfigKeys.DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY,
+    DFSConfigKeys.DFS_NAMENODE_BACKUP_SERVICE_RPC_ADDRESS_KEY
+  };
+  
   public long getProtocolVersion(String protocol, 
                                  long clientVersion) throws IOException {
     if (protocol.equals(ClientProtocol.class.getName())) {
@@ -157,6 +187,8 @@ public class NameNode implements Namenod
       return RefreshAuthorizationPolicyProtocol.versionID;
     } else if (protocol.equals(RefreshUserMappingsProtocol.class.getName())){
       return RefreshUserMappingsProtocol.versionID;
+    } else if (protocol.equals(GetUserMappingsProtocol.class.getName())){
+      return GetUserMappingsProtocol.versionID;
     } else {
       throw new IOException("Unknown protocol to name node: " + protocol);
     }
@@ -211,7 +243,7 @@ public class NameNode implements Namenod
     format(conf, false);
   }
 
-  static NameNodeMetrics myMetrics;
+  static NameNodeMetrics metrics;
 
   /** Return the {@link FSNamesystem} object.
    * @return {@link FSNamesystem} object.
@@ -221,11 +253,11 @@ public class NameNode implements Namenod
   }
 
   static void initMetrics(Configuration conf, NamenodeRole role) {
-    myMetrics = new NameNodeMetrics(conf, role);
+    metrics = NameNodeMetrics.create(conf, role);
   }
 
   public static NameNodeMetrics getNameNodeMetrics() {
-    return myMetrics;
+    return metrics;
   }
   
   public static InetSocketAddress getAddress(String address) {
@@ -638,8 +670,8 @@ public class NameNode implements Namenod
     if(emptier != null) emptier.interrupt();
     if(server != null) server.stop();
     if(serviceRpcServer != null) serviceRpcServer.stop();
-    if (myMetrics != null) {
-      myMetrics.shutdown();
+    if (metrics != null) {
+      metrics.shutdown();
     }
     if (namesystem != null) {
       namesystem.shutdown();
@@ -749,7 +781,7 @@ public class NameNode implements Namenod
                                           long offset, 
                                           long length) 
       throws IOException {
-    myMetrics.numGetBlockLocations.inc();
+    metrics.incrGetBlockLocations();
     return namesystem.getBlockLocations(getClientMachine(), 
                                         src, offset, length);
   }
@@ -788,8 +820,8 @@ public class NameNode implements Namenod
         new PermissionStatus(UserGroupInformation.getCurrentUser().getShortUserName(),
             null, masked),
         clientName, clientMachine, flag.get(), createParent, replication, blockSize);
-    myMetrics.numFilesCreated.inc();
-    myMetrics.numCreateFileOps.inc();
+    metrics.incrFilesCreated();
+    metrics.incrCreateFileOps();
   }
 
   /** {@inheritDoc} */
@@ -801,7 +833,7 @@ public class NameNode implements Namenod
           +src+" for "+clientName+" at "+clientMachine);
     }
     LocatedBlock info = namesystem.appendFile(src, clientName, clientMachine);
-    myMetrics.numFilesAppended.inc();
+    metrics.incrFilesAppended();
     return info;
   }
 
@@ -843,7 +875,7 @@ public class NameNode implements Namenod
     LocatedBlock locatedBlock = 
       namesystem.getAdditionalBlock(src, clientName, previous, excludedNodesSet);
     if (locatedBlock != null)
-      myMetrics.numAddBlockOps.inc();
+      metrics.incrAddBlockOps();
     return locatedBlock;
   }
 
@@ -861,7 +893,7 @@ public class NameNode implements Namenod
           + ", clientName=" + clientName);
     }
 
-    myMetrics.numGetAdditionalDatanodeOps.inc();
+    metrics.incrGetAdditionalDatanodeOps();
 
     HashMap<Node, Node> excludeSet = null;
     if (excludes != null) {
@@ -958,7 +990,7 @@ public class NameNode implements Namenod
     }
     boolean ret = namesystem.renameTo(src, dst);
     if (ret) {
-      myMetrics.numFilesRenamed.inc();
+      metrics.incrFilesRenamed();
     }
     return ret;
   }
@@ -982,7 +1014,7 @@ public class NameNode implements Namenod
                             + MAX_PATH_LENGTH + " characters, " + MAX_PATH_DEPTH + " levels.");
     }
     namesystem.renameTo(src, dst, options);
-    myMetrics.numFilesRenamed.inc();
+    metrics.incrFilesRenamed();
   }
 
   /**
@@ -1000,7 +1032,7 @@ public class NameNode implements Namenod
     }
     boolean ret = namesystem.delete(src, recursive);
     if (ret) 
-      myMetrics.numDeleteFileOps.inc();
+      metrics.incrDeleteFileOps();
     return ret;
   }
 
@@ -1046,8 +1078,8 @@ public class NameNode implements Namenod
     DirectoryListing files = namesystem.getListing(
         src, startAfter, needLocation);
     if (files != null) {
-      myMetrics.numGetListingOps.inc();
-      myMetrics.numFilesInGetListingOps.inc(files.getPartialListing().length);
+      metrics.incrGetListingOps();
+      metrics.incrFilesInGetListingOps(files.getPartialListing().length);
     }
     return files;
   }
@@ -1059,7 +1091,7 @@ public class NameNode implements Namenod
    *         or null if file not found
    */
   public HdfsFileStatus getFileInfo(String src)  throws IOException {
-    myMetrics.numFileInfoOps.inc();
+    metrics.incrFileInfoOps();
     return namesystem.getFileInfo(src, true);
   }
 
@@ -1071,11 +1103,11 @@ public class NameNode implements Namenod
    *         or null if file not found
    */
   public HdfsFileStatus getFileLinkInfo(String src) throws IOException { 
-    myMetrics.numFileInfoOps.inc();
+    metrics.incrFileInfoOps();
     return namesystem.getFileInfo(src, false);
   }
   
-  /** @inheritDoc */
+  @Override
   public long[] getStats() {
     return namesystem.getStats();
   }
@@ -1091,9 +1123,7 @@ public class NameNode implements Namenod
     return results;
   }
     
-  /**
-   * @inheritDoc
-   */
+  @Override
   public boolean setSafeMode(SafeModeAction action) throws IOException {
     return namesystem.setSafeMode(action);
   }
@@ -1105,18 +1135,13 @@ public class NameNode implements Namenod
     return namesystem.isInSafeMode();
   }
 
-  /**
-   * @throws AccessControlException 
-   * @inheritDoc
-   */
+  @Override
   public boolean restoreFailedStorage(String arg) 
       throws AccessControlException {
     return namesystem.restoreFailedStorage(arg);
   }
 
-  /**
-   * @inheritDoc
-   */
+  @Override
   public void saveNamespace() throws IOException {
     namesystem.saveNamespace();
   }
@@ -1212,17 +1237,17 @@ public class NameNode implements Namenod
     namesystem.fsync(src, clientName);
   }
 
-  /** @inheritDoc */
+  @Override
   public void setTimes(String src, long mtime, long atime) 
       throws IOException {
     namesystem.setTimes(src, mtime, atime);
   }
 
-  /** @inheritDoc */
+  @Override
   public void createSymlink(String target, String link, FsPermission dirPerms, 
                             boolean createParent) 
       throws IOException {
-    myMetrics.numcreateSymlinkOps.inc();
+    metrics.incrCreateSymlinkOps();
     /* We enforce the MAX_PATH_LENGTH limit even though a symlink target 
      * URI may refer to a non-HDFS file system. 
      */
@@ -1239,9 +1264,9 @@ public class NameNode implements Namenod
       new PermissionStatus(ugi.getShortUserName(), null, dirPerms), createParent);
   }
 
-  /** @inheritDoc */
+  @Override
   public String getLinkTarget(String path) throws IOException {
-    myMetrics.numgetLinkTargetOps.inc();
+    metrics.incrGetLinkTargetOps();
     /* Resolves the first symlink in the given path, returning a
      * new path consisting of the target of the symlink and any 
      * remaining path components from the original path.
@@ -1547,6 +1572,14 @@ public class NameNode implements Namenod
 
     ProxyUsers.refreshSuperUserGroupsConfiguration();
   }
+  
+  @Override
+  public String[] getGroupsForUser(String user) throws IOException {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Getting groups for user " + user);
+    }
+    return UserGroupInformation.createRemoteUser(user).getGroupNames();
+  }
 
   private static void printUsage() {
     System.err.println(
@@ -1638,8 +1671,11 @@ public class NameNode implements Namenod
         return null; // avoid javac warning
       case BACKUP:
       case CHECKPOINT:
-        return new BackupNode(conf, startOpt.toNodeRole());
+        NamenodeRole role = startOpt.toNodeRole();
+        DefaultMetricsSystem.initialize(role.toString().replace(" ", ""));
+        return new BackupNode(conf, role);
       default:
+        DefaultMetricsSystem.initialize("NameNode");
         return new NameNode(conf);
     }
   }
@@ -1658,25 +1694,15 @@ public class NameNode implements Namenod
    * @param conf
    *          Configuration object to lookup specific key and to set the value
    *          to the key passed. Note the conf object is modified
-   * @see DFSUtil#setGenericConf()
+   * @see DFSUtil#setGenericConf(Configuration, String, String...)
    */
-  static void initializeGenericKeys(Configuration conf) {
+  public static void initializeGenericKeys(Configuration conf) {
     final String nameserviceId = DFSUtil.getNameServiceId(conf);
     if ((nameserviceId == null) || nameserviceId.isEmpty()) {
       return;
     }
     
-    DFSUtil.setGenericConf(conf, nameserviceId,
-        DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY,
-        DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
-        DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
-        DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
-        DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
-        DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
-        DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY,
-        DFSConfigKeys.DFS_NAMENODE_BACKUP_ADDRESS_KEY,
-        DFSConfigKeys.DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY,
-        DFSConfigKeys.DFS_NAMENODE_BACKUP_SERVICE_RPC_ADDRESS_KEY);
+    DFSUtil.setGenericConf(conf, nameserviceId, NAMESERVICE_SPECIFIC_KEYS);
     
     if (conf.get(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY) != null) {
       URI defaultUri = URI.create(FSConstants.HDFS_URI_SCHEME + "://"
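
The federation hunks above factor the per-nameservice key list into NAMESERVICE_SPECIFIC_KEYS and make initializeGenericKeys() public. The mechanism, per the javadoc: a key such as dfs.namenode.rpc-address may appear suffixed with a nameservice id (dfs.namenode.rpc-address.nameservice1), and DFSUtil.setGenericConf() promotes the suffixed value to the generic key. A hypothetical re-implementation of that promotion, for illustration only:

    import org.apache.hadoop.conf.Configuration;

    class FederationKeySketch {
      // Invented helper mirroring what DFSUtil.setGenericConf(conf,
      // nameserviceId, keys...) is documented to do above.
      static void promoteNameserviceKeys(Configuration conf,
          String nameserviceId, String... keys) {
        for (String key : keys) {
          // e.g. "dfs.namenode.rpc-address" + "." + "nameservice1"
          String value = conf.get(key + "." + nameserviceId);
          if (value != null) {
            // The generic key now resolves correctly for this NameNode.
            conf.set(key, value);
          }
        }
      }
    }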

Modified: hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java?rev=1124466&r1=1124465&r2=1124466&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java (original)
+++ hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java Wed May 18 23:44:23 2011
@@ -49,7 +49,8 @@ import org.apache.hadoop.hdfs.server.pro
 import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.metrics.jvm.JvmMetrics;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.source.JvmMetrics;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector;
 import org.apache.hadoop.security.SecurityUtil;
@@ -154,7 +155,9 @@ public class SecondaryNameNode implement
           infoBindAddress);
     }
     // initiate Java VM metrics
-    JvmMetrics.init("SecondaryNameNode", conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY));
+    JvmMetrics.create("SecondaryNameNode",
+        conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY),
+        DefaultMetricsSystem.instance());
     
     // Create connection to the namenode.
     shouldRun = true;
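
This hunk moves the secondary's JVM metrics from the deprecated metrics framework to metrics2: JvmMetrics.create() builds the JVM source (heap, GC, threads) and registers it with the default metrics system. A minimal bootstrap sketch under that assumption; the initialize() prefix is taken from the NameNode hunks earlier in this commit, not from SecondaryNameNode itself:

    import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
    import org.apache.hadoop.metrics2.source.JvmMetrics;

    class JvmMetricsBootstrap {
      static void start(String sessionId) {
        // Assumed prefix; NameNode.createNameNode() uses "NameNode".
        DefaultMetricsSystem.initialize("SecondaryNameNode");
        JvmMetrics.create("SecondaryNameNode", sessionId,
            DefaultMetricsSystem.instance());
      }
    }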

Modified: hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java?rev=1124466&r1=1124465&r2=1124466&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java (original)
+++ hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/FSNamesystemMBean.java Wed May 18 23:44:23 2011
@@ -31,8 +31,8 @@ import org.apache.hadoop.classification.
  * be published as an interface.
  * 
  * <p>
- * Name Node runtime activity statistic  info is report in another MBean
- * @see org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeActivityMBean
+ * Name Node runtime activity statistics are reported in
+ * @see org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics
  *
  */
 @InterfaceAudience.Private

Modified: hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeMetrics.java?rev=1124466&r1=1124465&r2=1124466&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeMetrics.java (original)
+++ hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/namenode/metrics/NameNodeMetrics.java Wed May 18 23:44:23 2011
@@ -17,128 +17,146 @@
  */
 package org.apache.hadoop.hdfs.server.namenode.metrics;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.NamenodeRole;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.metrics.*;
-import org.apache.hadoop.metrics.jvm.JvmMetrics;
-import org.apache.hadoop.metrics.util.MetricsBase;
-import org.apache.hadoop.metrics.util.MetricsIntValue;
-import org.apache.hadoop.metrics.util.MetricsRegistry;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingInt;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import static org.apache.hadoop.metrics2.impl.MsInfo.*;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MetricsRegistry;
+import org.apache.hadoop.metrics2.lib.MutableCounterLong;
+import org.apache.hadoop.metrics2.lib.MutableGaugeInt;
+import org.apache.hadoop.metrics2.lib.MutableRate;
+import org.apache.hadoop.metrics2.source.JvmMetrics;
 
 /**
- * 
  * This class is for maintaining  the various NameNode activity statistics
  * and publishing them through the metrics interfaces.
- * This also registers the JMX MBean for RPC.
- * <p>
- * This class has a number of metrics variables that are publicly accessible;
- * these variables (objects) have methods to update their values;
- *  for example:
- *  <p> {@link #syncs}.inc()
- *
  */
-@InterfaceAudience.Private
-public class NameNodeMetrics implements Updater {
-    private static Log log = LogFactory.getLog(NameNodeMetrics.class);
-    private final MetricsRecord metricsRecord;
-    public MetricsRegistry registry = new MetricsRegistry();
-    
-    private NameNodeActivityMBean namenodeActivityMBean;
-    
-    public MetricsTimeVaryingInt numCreateFileOps = 
-                    new MetricsTimeVaryingInt("CreateFileOps", registry);
-    public MetricsTimeVaryingInt numFilesCreated =
-                          new MetricsTimeVaryingInt("FilesCreated", registry);
-    public MetricsTimeVaryingInt numFilesAppended =
-                          new MetricsTimeVaryingInt("FilesAppended", registry);
-    public MetricsTimeVaryingInt numGetBlockLocations = 
-                    new MetricsTimeVaryingInt("GetBlockLocations", registry);
-    public MetricsTimeVaryingInt numFilesRenamed =
-                    new MetricsTimeVaryingInt("FilesRenamed", registry);
-    public MetricsTimeVaryingInt numGetListingOps = 
-                    new MetricsTimeVaryingInt("GetListingOps", registry);
-    public MetricsTimeVaryingInt numDeleteFileOps = 
-                          new MetricsTimeVaryingInt("DeleteFileOps", registry);
-    public MetricsTimeVaryingInt numFilesDeleted = new MetricsTimeVaryingInt(
-        "FilesDeleted", registry, 
-        "Number of files and directories deleted by delete or rename operation");
-    public MetricsTimeVaryingInt numFileInfoOps =
-                          new MetricsTimeVaryingInt("FileInfoOps", registry);
-    public MetricsTimeVaryingInt numAddBlockOps = 
-                          new MetricsTimeVaryingInt("AddBlockOps", registry);
-    public final MetricsTimeVaryingInt numGetAdditionalDatanodeOps
-        = new MetricsTimeVaryingInt("GetAdditionalDatanodeOps", registry);
-    public MetricsTimeVaryingInt numcreateSymlinkOps = 
-                          new MetricsTimeVaryingInt("CreateSymlinkOps", registry);
-    public MetricsTimeVaryingInt numgetLinkTargetOps = 
-                          new MetricsTimeVaryingInt("GetLinkTargetOps", registry);
-
-    public MetricsTimeVaryingRate transactions = new MetricsTimeVaryingRate(
-      "Transactions", registry, "Journal Transaction");
-    public MetricsTimeVaryingRate syncs =
-                    new MetricsTimeVaryingRate("Syncs", registry, "Journal Sync");
-    public MetricsTimeVaryingInt transactionsBatchedInSync = new MetricsTimeVaryingInt(
-      "JournalTransactionsBatchedInSync", registry,
-      "Journal Transactions Batched In Sync");
-    public MetricsTimeVaryingRate blockReport =
-                    new MetricsTimeVaryingRate("blockReport", registry, "Block Report");
-    public MetricsIntValue safeModeTime =
-                    new MetricsIntValue("SafemodeTime", registry, "Duration in SafeMode at Startup");
-    public MetricsIntValue fsImageLoadTime = 
-                    new MetricsIntValue("fsImageLoadTime", registry, "Time loading FS Image at Startup");
-    public MetricsIntValue numBlocksCorrupted =
-                    new MetricsIntValue("BlocksCorrupted", registry);
-    public MetricsTimeVaryingInt numFilesInGetListingOps = 
-                    new MetricsTimeVaryingInt("FilesInGetListingOps", registry);
-
-      
-    public NameNodeMetrics(Configuration conf, NamenodeRole nameNodeRole) {
-      String sessionId = conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY);
-      // Initiate Java VM metrics
-      String processName = nameNodeRole.toString();
-      JvmMetrics.init(processName, sessionId);
-
-      // Now the Mbean for the name node - this also registers the MBean
-      namenodeActivityMBean = new NameNodeActivityMBean(registry);
-      
-      // Create a record for NameNode metrics
-      MetricsContext metricsContext = MetricsUtil.getContext("dfs");
-      metricsRecord = MetricsUtil.createRecord(metricsContext, processName.toLowerCase());
-      metricsRecord.setTag("sessionId", sessionId);
-      metricsContext.registerUpdater(this);
-      log.info("Initializing NameNodeMeterics using context object:" +
-                metricsContext.getClass().getName());
-    }
-    
-
-    
-    public void shutdown() {
-      if (namenodeActivityMBean != null) 
-        namenodeActivityMBean.shutdown();
-    }
-      
-    /**
-     * Since this object is a registered updater, this method will be called
-     * periodically, e.g. every 5 seconds.
-     */
-    public void doUpdates(MetricsContext unused) {
-      synchronized (this) {
-        for (MetricsBase m : registry.getMetricsList()) {
-          m.pushMetric(metricsRecord);
-        }
-      }
-      metricsRecord.update();
-    }
-
-    public void resetAllMinMax() {
-      transactions.resetMinMax();
-      syncs.resetMinMax();
-      blockReport.resetMinMax();
-    }
+@Metrics(name="NameNodeActivity", about="NameNode metrics", context="dfs")
+public class NameNodeMetrics {
+  final MetricsRegistry registry = new MetricsRegistry("namenode");
+
+  @Metric MutableCounterLong createFileOps;
+  @Metric MutableCounterLong filesCreated;
+  @Metric MutableCounterLong filesAppended;
+  @Metric MutableCounterLong getBlockLocations;
+  @Metric MutableCounterLong filesRenamed;
+  @Metric MutableCounterLong getListingOps;
+  @Metric MutableCounterLong deleteFileOps;
+  @Metric("Number of files/dirs deleted by delete or rename operations")
+  MutableCounterLong filesDeleted;
+  @Metric MutableCounterLong fileInfoOps;
+  @Metric MutableCounterLong addBlockOps;
+  @Metric MutableCounterLong getAdditionalDatanodeOps;
+  @Metric MutableCounterLong createSymlinkOps;
+  @Metric MutableCounterLong getLinkTargetOps;
+  @Metric MutableCounterLong filesInGetListingOps;
+
+  @Metric("Journal transactions") MutableRate transactions;
+  @Metric("Journal syncs") MutableRate syncs;
+  @Metric("Journal transactions batched in sync")
+  MutableCounterLong transactionsBatchedInSync;
+  @Metric("Block report") MutableRate blockReport;
+
+  @Metric("Duration in SafeMode at startup") MutableGaugeInt safeModeTime;
+  @Metric("Time loading FS Image at startup") MutableGaugeInt fsImageLoadTime;
+
+  NameNodeMetrics(String processName, String sessionId) {
+    registry.tag(ProcessName, processName).tag(SessionId, sessionId);
+  }
+
+  public static NameNodeMetrics create(Configuration conf, NamenodeRole r) {
+    String sessionId = conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY);
+    String processName = r.toString();
+    MetricsSystem ms = DefaultMetricsSystem.instance();
+    JvmMetrics.create(processName, sessionId, ms);
+    return ms.register(new NameNodeMetrics(processName, sessionId));
+  }
+
+  public void shutdown() {
+    DefaultMetricsSystem.shutdown();
+  }
+
+  public void incrGetBlockLocations() {
+    getBlockLocations.incr();
+  }
+
+  public void incrFilesCreated() {
+    filesCreated.incr();
+  }
+
+  public void incrCreateFileOps() {
+    createFileOps.incr();
+  }
+
+  public void incrFilesAppended() {
+    filesAppended.incr();
+  }
+
+  public void incrAddBlockOps() {
+    addBlockOps.incr();
+  }
+  
+  public void incrGetAdditionalDatanodeOps() {
+    getAdditionalDatanodeOps.incr();
+  }
+
+  public void incrFilesRenamed() {
+    filesRenamed.incr();
+  }
+
+  public void incrFilesDeleted(int delta) {
+    filesDeleted.incr(delta);
+  }
+
+  public void incrDeleteFileOps() {
+    deleteFileOps.incr();
+  }
+
+  public void incrGetListingOps() {
+    getListingOps.incr();
+  }
+
+  public void incrFilesInGetListingOps(int delta) {
+    filesInGetListingOps.incr(delta);
+  }
+
+  public void incrFileInfoOps() {
+    fileInfoOps.incr();
+  }
+
+  public void incrCreateSymlinkOps() {
+    createSymlinkOps.incr();
+  }
+
+  public void incrGetLinkTargetOps() {
+    getLinkTargetOps.incr();
+  }
+
+  public void addTransaction(long latency) {
+    transactions.add(latency);
+  }
+
+  public void incrTransactionsBatchedInSync() {
+    transactionsBatchedInSync.incr();
+  }
+
+  public void addSync(long elapsed) {
+    syncs.add(elapsed);
+  }
+
+  public void setFsImageLoadTime(long elapsed) {
+    fsImageLoadTime.set((int) elapsed);
+  }
+
+  public void addBlockReport(long latency) {
+    blockReport.add(latency);
+  }
+
+  public void setSafeModeTime(long elapsed) {
+    safeModeTime.set((int) elapsed);
+  }
 }
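
With this rewrite, callers no longer poke public metric fields (myMetrics.numFilesCreated.inc()); they call the typed incr*/add*/set* facade, and create() handles registration. A hedged usage sketch, assuming only the methods defined above:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.server.common.HdfsConstants.NamenodeRole;
    import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics;

    class NameNodeMetricsUsage {
      static void example(Configuration conf, NamenodeRole role) {
        // create() registers JVM metrics plus this annotated source.
        NameNodeMetrics metrics = NameNodeMetrics.create(conf, role);
        metrics.incrFilesCreated();      // was numFilesCreated.inc()
        metrics.addBlockReport(37L);     // was blockReport.inc(elapsedMs)
        metrics.setSafeModeTime(1200L);  // gauge; truncated to int internally
        metrics.shutdown();              // shuts down the default metrics system
      }
    }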

Modified: hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java?rev=1124466&r1=1124465&r2=1124466&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java (original)
+++ hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java Wed May 18 23:44:23 2011
@@ -22,6 +22,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
 import org.apache.hadoop.security.RefreshUserMappingsProtocol;
+import org.apache.hadoop.tools.GetUserMappingsProtocol;
 
 /** The full set of RPC methods implemented by the Namenode.  */
 @InterfaceAudience.Private
@@ -30,5 +31,6 @@ public interface NamenodeProtocols
           DatanodeProtocol,
           NamenodeProtocol,
           RefreshAuthorizationPolicyProtocol,
-          RefreshUserMappingsProtocol {
+          RefreshUserMappingsProtocol,
+          GetUserMappingsProtocol {
 }
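
NamenodeProtocols now also aggregates GetUserMappingsProtocol, whose single method the NameNode hunk above implements by resolving groups through UserGroupInformation. A hedged caller-side sketch; obtaining the RPC proxy is assumed to happen elsewhere:

    import java.io.IOException;
    import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;

    class GroupsLookupSketch {
      static void printGroups(NamenodeProtocols namenode, String user)
          throws IOException {
        // Dispatches over RPC to NameNode.getGroupsForUser(user).
        for (String group : namenode.getGroupsForUser(user)) {
          System.out.println(user + " : " + group);
        }
      }
    }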

Modified: hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/tools/JMXGet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/tools/JMXGet.java?rev=1124466&r1=1124465&r2=1124466&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/tools/JMXGet.java (original)
+++ hadoop/hdfs/branches/HDFS-1073/src/java/org/apache/hadoop/hdfs/tools/JMXGet.java Wed May 18 23:44:23 2011
@@ -126,7 +126,7 @@ public class JMXGet {
           continue;
         }
       }
-      err("Info: key = " + key + "; val = " + val);
+      err("Info: key = " + key + "; val = " + val.getClass() + ":" + val);
       break;
     }
 
@@ -193,7 +193,7 @@ public class JMXGet {
     err("\nMBean count = " + mbsc.getMBeanCount());
 
     // Query MBean names for specific domain "hadoop" and service
-    ObjectName query = new ObjectName("hadoop:service=" + service + ",*");
+    ObjectName query = new ObjectName("Hadoop:service=" + service + ",*");
     hadoopObjectNames = new ArrayList<ObjectName>(5);
     err("\nQuery MBeanServer MBeans:");
     Set<ObjectName> names = new TreeSet<ObjectName>(mbsc
@@ -201,7 +201,7 @@ public class JMXGet {
 
     for (ObjectName name : names) {
       hadoopObjectNames.add(name);
-      err("hadoop services: " + name);
+      err("Hadoop service: " + name);
     }
 
   }
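
The JMXGet fixes track the metrics2 MBean naming: the domain is now the capitalized "Hadoop", so the old lowercase query matched nothing. A small sketch of the corrected query, assuming an MBeanServerConnection is already established:

    import java.util.Set;
    import javax.management.MBeanServerConnection;
    import javax.management.ObjectName;

    class JmxDomainQuery {
      static Set<ObjectName> nameNodeBeans(MBeanServerConnection mbsc)
          throws Exception {
        // Matches e.g. Hadoop:service=NameNode,name=FSNamesystemState
        ObjectName query = new ObjectName("Hadoop:service=NameNode,*");
        return mbsc.queryNames(query, null);
      }
    }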

Modified: hadoop/hdfs/branches/HDFS-1073/src/test/aop/build/aop.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1073/src/test/aop/build/aop.xml?rev=1124466&r1=1124465&r2=1124466&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1073/src/test/aop/build/aop.xml (original)
+++ hadoop/hdfs/branches/HDFS-1073/src/test/aop/build/aop.xml Wed May 18 23:44:23 2011
@@ -124,7 +124,7 @@
   <!-- ================ -->
   <!-- run system tests -->
   <!-- ================ -->
-  <target name="test-system" depends="ivy-retrieve-common, ivy-retrieve-system"
+  <target name="test-system" depends="init, ivy-retrieve-system"
     description="Run system tests">
     <subant buildpath="build.xml" target="jar-test-system"/>
     <macro-test-runner test.file="${test.hdfs.all.tests.file}"
@@ -143,7 +143,7 @@
     <delete file="${compile-inject.output}"/>
     <weave-injectfault-aspects dest.dir="${build-fi.dir}/classes"
                                src.dir="${test.src.dir}/aop"
-      aspects.jar="${build-fi.dir}/ivy/lib/${ant.project.name}/test/hadoop-common-${project.version}.jar">
+      aspects.jar="${build-fi.dir}/ivy/lib/${ant.project.name}/common/hadoop-common-${project.version}.jar">
     </weave-injectfault-aspects>
   </target>
 
@@ -185,7 +185,7 @@
     </subant>
   </target>
 
-  <target name="-compile-test-system.wrapper" depends="inject-system-faults, ivy-retrieve-common, ivy-retrieve-system">
+  <target name="-compile-test-system.wrapper" depends="inject-system-faults, ivy-retrieve-common, ivy-retrieve-hdfs, ivy-retrieve-system">
     <macro-compile-hdfs-test
       target.dir="${system-test-build-dir}/test/classes"
       source.dir="${test.src.dir}/system/test"

Propchange: hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed May 18 23:44:23 2011
@@ -3,4 +3,4 @@
 /hadoop/hdfs/branches/HDFS-1052/src/test/hdfs:987665-1095512
 /hadoop/hdfs/branches/HDFS-265/src/test/hdfs:796829-820463
 /hadoop/hdfs/branches/branch-0.21/src/test/hdfs:820487
-/hadoop/hdfs/trunk/src/test/hdfs:1086482-1102504
+/hadoop/hdfs/trunk/src/test/hdfs:1086482-1124460

Modified: hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml?rev=1124466&r1=1124465&r2=1124466&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml (original)
+++ hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml Wed May 18 23:44:23 2011
@@ -923,12 +923,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
+          <expected-output>^15\s+/data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -944,12 +940,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/data15bytes</expected-output>
+          <expected-output>^15\s+data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -968,24 +960,20 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 4 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/data15bytes</expected-output>
+          <expected-output>^120\s+hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/data120bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/data30bytes</expected-output>
+          <expected-output>^15\s+hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^60( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/data60bytes</expected-output>
+          <expected-output>^30\s+hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/data30bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^120( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/data120bytes</expected-output>
+          <expected-output>^60\s+hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/data60bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -1002,12 +990,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
+          <expected-output>^15\s+/dir0/data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -1024,12 +1008,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0/data15bytes</expected-output>
+          <expected-output>^15\s+dir0/data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -1049,10 +1029,6 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 4 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
           <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
         </comparator>
@@ -1082,12 +1058,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
+          <expected-output>^15\s+hdfs:///data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -1106,24 +1078,20 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 4 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
+          <expected-output>^120\s+hdfs://\w+[.a-z]*:[0-9]*/data120bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data30bytes</expected-output>
+          <expected-output>^15\s+hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^60( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data60bytes</expected-output>
+          <expected-output>^30\s+hdfs://\w+[.a-z]*:[0-9]*/data30bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^120( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data120bytes</expected-output>
+          <expected-output>^60\s+hdfs://\w+[.a-z]*:[0-9]*/data60bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -1140,12 +1108,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
+          <expected-output>^15\s+hdfs:///dir0/data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -1163,16 +1127,12 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 2 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
+          <expected-output>^15\s+hdfs:///dir0/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^1.0k( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data1k</expected-output>
+          <expected-output>^1\.0k\s+hdfs:///dir0/data1k</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -1192,10 +1152,6 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 4 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
           <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
         </comparator>
@@ -1225,10 +1181,6 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
           <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
         </comparator>
@@ -1249,10 +1201,6 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 4 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
           <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
         </comparator>
@@ -1283,10 +1231,6 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
           <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
         </comparator>
@@ -1308,10 +1252,6 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 4 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
           <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
         </comparator>
@@ -1358,7 +1298,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^450\s+hdfs://\w+[.a-z]*:[0-9]*/dir0</expected-output>
+          <expected-output>^450\s+/dir0</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -1390,7 +1330,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^450\s+hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0</expected-output>
+          <expected-output>^450\s+dir0</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -1461,7 +1401,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^450\s+hdfs://\w+[.a-z]*:[0-9]*/dir0</expected-output>
+          <expected-output>^450\s+hdfs:///dir0</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -3954,12 +3894,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
+          <expected-output>^15\s+/data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -3975,12 +3911,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/data15bytes</expected-output>
+          <expected-output>^15\s+data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -3997,19 +3929,23 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/dir1/data/data15bytes</expected-output>
+          <expected-output>^120\s+/dir0/dir1/data/data120bytes</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^15\s+/dir0/dir1/data/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/dir1/data/data30bytes</expected-output>
+          <expected-output>^1065\s+/dir0/dir1/data/data1k</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^60( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/dir1/data/data60bytes</expected-output>
+          <expected-output>^30\s+/dir0/dir1/data/data30bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^120( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/dir1/data/data120bytes</expected-output>
+          <expected-output>^60\s+/dir0/dir1/data/data60bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4026,19 +3962,23 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0/dir1/data/data15bytes</expected-output>
+          <expected-output>^120\s+dir0/dir1/data/data120bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0/dir1/data/data30bytes</expected-output>
+          <expected-output>^15\s+dir0/dir1/data/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^60( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0/dir1/data/data60bytes</expected-output>
+          <expected-output>^1065\s+dir0/dir1/data/data1k</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^120( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0/dir1/data/data120bytes</expected-output>
+          <expected-output>^30\s+dir0/dir1/data/data30bytes</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^60\s+dir0/dir1/data/data60bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4055,16 +3995,12 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 2 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
+          <expected-output>^15\s+/dir0/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data30bytes</expected-output>
+          <expected-output>^30\s+/dir0/data30bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4081,16 +4017,12 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 2 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0/data15bytes</expected-output>
+          <expected-output>^15\s+dir0/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0/data30bytes</expected-output>
+          <expected-output>^30\s+dir0/data30bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4209,12 +4141,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
+          <expected-output>^15\s+hdfs:///data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4231,19 +4159,23 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir1/data/data15bytes</expected-output>
+          <expected-output>^120\s+hdfs:///dir1/data/data120bytes</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^15\s+hdfs:///dir1/data/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir1/data/data30bytes</expected-output>
+          <expected-output>^1065\s+hdfs:///dir1/data/data1k</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^60( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir1/data/data60bytes</expected-output>
+          <expected-output>^30\s+hdfs:///dir1/data/data30bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^120( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir1/data/data120bytes</expected-output>
+          <expected-output>^60\s+hdfs:///dir1/data/data60bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4260,16 +4192,12 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 2 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
+          <expected-output>^15\s+hdfs:///dir0/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data30bytes</expected-output>
+          <expected-output>^30\s+hdfs:///dir0/data30bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4351,12 +4279,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
+          <expected-output>^15\s+hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4373,19 +4297,23 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir1/data/data15bytes</expected-output>
+          <expected-output>^120\s+hdfs://\w+[.a-z]*:[0-9]*/dir1/data/data120bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir1/data/data30bytes</expected-output>
+          <expected-output>^1065\s+hdfs://\w+[.a-z]*:[0-9]*/dir1/data/data1k</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^60( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir1/data/data60bytes</expected-output>
+          <expected-output>^15\s+hdfs://\w+[.a-z]*:[0-9]*/dir1/data/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^120( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir1/data/data120bytes</expected-output>
+          <expected-output>^30\s+hdfs://\w+[.a-z]*:[0-9]*/dir1/data/data30bytes</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^60\s+hdfs://\w+[.a-z]*:[0-9]*/dir1/data/data60bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4402,16 +4330,12 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 2 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
+          <expected-output>^15\s+hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data30bytes</expected-output>
+          <expected-output>^30\s+hdfs://\w+[.a-z]*:[0-9]*/dir0/data30bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4494,12 +4418,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
+          <expected-output>^15\s+/data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4515,12 +4435,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/data15bytes</expected-output>
+          <expected-output>^15\s+data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4537,19 +4453,19 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/dir1/data/data15bytes</expected-output>
+          <expected-output>^15\s+/dir0/dir1/data/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/dir1/data/data30bytes</expected-output>
+          <expected-output>^30\s+/dir0/dir1/data/data30bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^60( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/dir1/data/data60bytes</expected-output>
+          <expected-output>^60\s+/dir0/dir1/data/data60bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^120( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/dir1/data/data120bytes</expected-output>
+          <expected-output>^120\s+/dir0/dir1/data/data120bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4566,19 +4482,19 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0/dir1/data/data15bytes</expected-output>
+          <expected-output>^15\s+dir0/dir1/data/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0/dir1/data/data30bytes</expected-output>
+          <expected-output>^30\s+dir0/dir1/data/data30bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^60( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0/dir1/data/data60bytes</expected-output>
+          <expected-output>^60\s+dir0/dir1/data/data60bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^120( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0/dir1/data/data120bytes</expected-output>
+          <expected-output>^120\s+dir0/dir1/data/data120bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4595,16 +4511,12 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 2 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
+          <expected-output>^15\s+/dir0/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data30bytes</expected-output>
+          <expected-output>^30\s+/dir0/data30bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4621,16 +4533,12 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 2 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0/data15bytes</expected-output>
+          <expected-output>^15\s+dir0/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0/data30bytes</expected-output>
+          <expected-output>^30\s+dir0/data30bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4749,12 +4657,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
+          <expected-output>^15\s+hdfs:///data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4773,12 +4677,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/dir1/data/data15bytes</expected-output>
+          <expected-output>^15\s+hdfs:///dir0/dir1/data/data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4795,16 +4695,12 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 2 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
+          <expected-output>^15\s+hdfs:///dir0/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data30bytes</expected-output>
+          <expected-output>^30\s+hdfs:///dir0/data30bytes</expected-output>
         </comparator>
      </comparators>
     </test>
@@ -4887,12 +4783,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
+          <expected-output>^15\s+hdfs://\w+[.a-z]*:[0-9]*/data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4911,12 +4803,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/dir1/data/data15bytes</expected-output>
+          <expected-output>^15\s+hdfs://\w+[.a-z]*:[0-9]*/dir0/dir1/data/data15bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -4933,16 +4821,12 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 2 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^15( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
+          <expected-output>^15\s+hdfs://\w+[.a-z]*:[0-9]*/dir0/data15bytes</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^30( |\t)*hdfs://\w+[.a-z]*:[0-9]*/dir0/data30bytes</expected-output>
+          <expected-output>^30\s+hdfs://\w+[.a-z]*:[0-9]*/dir0/data30bytes</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -5529,7 +5413,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^0\s+hdfs://\w+[.a-z]*:[0-9]*/dir0</expected-output>
+          <expected-output>^0\s+/dir0</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -5546,7 +5430,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^0\s+hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/dir0</expected-output>
+          <expected-output>^0\s+dir0</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -5661,7 +5545,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^0\s+hdfs://\w+[.a-z]*:[0-9]*/dir0(|\t)*</expected-output>
+          <expected-output>^0\s+hdfs:///dir0</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -6042,12 +5926,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^0( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/file0</expected-output>
+          <expected-output>^0\s+/user/file0</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -6063,12 +5943,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^0( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/file0</expected-output>
+          <expected-output>^0\s+file0</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -6084,10 +5960,6 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 3 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
           <expected-output>^0( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/file0</expected-output>
           <expected-output>^0( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/[a-z]*/file1</expected-output>
@@ -6108,7 +5980,7 @@
       <comparators>
         <comparator>
           <type>TokenComparator</type>
-          <expected-output>touchz: data15bytes must be a zero-length file</expected-output>
+          <expected-output>touchz: `data15bytes': Not a zero-length file</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -6124,12 +5996,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^0( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/file0</expected-output>
+          <expected-output>^0\s+hdfs:///user/file0</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -6145,10 +6013,6 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 3 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
           <expected-output>^0( |\t)*hdfs://\w+[.a-z]*:[0-9]+/file0</expected-output>
           <expected-output>^0( |\t)*hdfs://\w+[.a-z]*:[0-9]+/file1</expected-output>
@@ -6169,7 +6033,7 @@
       <comparators>
         <comparator>
           <type>TokenComparator</type>
-          <expected-output>touchz: hdfs:///data15bytes must be a zero-length file</expected-output>
+          <expected-output>touchz: `hdfs:///data15bytes': Not a zero-length file</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -6185,12 +6049,8 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 1 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^0( |\t)*hdfs://\w+[.a-z]*:[0-9]*/user/file0</expected-output>
+          <expected-output>^0\s+hdfs://\w+[.a-z]*:[0-9]*/user/file0</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -6206,14 +6066,10 @@
       </cleanup-commands>
       <comparators>
         <comparator>
-          <type>TokenComparator</type>
-          <expected-output>Found 3 items</expected-output>
-        </comparator>
-        <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^0( )*hdfs://\w+[.a-z]*:[0-9]+/file0</expected-output>
-          <expected-output>^0( )*hdfs://\w+[.a-z]*:[0-9]+/file1</expected-output>
-          <expected-output>^0( )*hdfs://\w+[.a-z]*:[0-9]+/file2</expected-output>
+          <expected-output>^0\s+hdfs://\w+[.a-z]*:[0-9]+/file0</expected-output>
+          <expected-output>^0\s+hdfs://\w+[.a-z]*:[0-9]+/file1</expected-output>
+          <expected-output>^0\s+hdfs://\w+[.a-z]*:[0-9]+/file2</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -6230,7 +6086,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>touchz: hdfs://\w+[.a-z]*:[0-9]+/data15bytes must be a zero-length file</expected-output>
+          <expected-output>touchz: `hdfs://\w+[.a-z]*:[0-9]+/data15bytes': Not a zero-length file</expected-output>
         </comparator>
       </comparators>
     </test>

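The hunks above trade TokenComparator checks on the "Found N items" banner for RegexpComparator patterns, and rewrite the expected paths to match the form FsShell now echoes back: a bare relative name, an absolute path, a scheme-only hdfs:/// URI, or a full hdfs://host:port/ URI, depending on what the command was given. A minimal sketch of how such anchored patterns behave against sample output lines, using plain java.util.regex rather than the actual CLI-test harness (the sample lines are illustrative, not taken from a real run):

    import java.util.regex.Pattern;

    public class RegexpComparatorSketch {
      public static void main(String[] args) {
        // '^' anchors the pattern at the start of the line; '\s+' spans
        // the mix of spaces and tabs padding the size column.
        Pattern p = Pattern.compile("^15\\s+/data15bytes");
        System.out.println(p.matcher("15          /data15bytes").find());         // true
        System.out.println(p.matcher("15\thdfs://host:8020/data15bytes").find()); // false
      }
    }

Note that \s+ subsumes the older ( |\t)* alternation, so each rewritten pattern also tolerates mixed space/tab padding between the size and the path.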
Modified: hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/DFSTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/DFSTestUtil.java?rev=1124466&r1=1124465&r2=1124466&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/DFSTestUtil.java (original)
+++ hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/DFSTestUtil.java Wed May 18 23:44:23 2011
@@ -56,10 +56,12 @@ import org.apache.hadoop.hdfs.protocol.D
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
+import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.TestTransferRbw;
 import org.apache.hadoop.hdfs.server.namenode.DatanodeDescriptor;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.NetUtils;
@@ -95,6 +97,19 @@ public class DFSTestUtil {
     this.maxSize = maxSize;
   }
   
+  /**
+   * When formatting a NameNode, a cluster ID must be provided;
+   * default to "testClusterID" when none has been set.
+   * @param conf configuration for the NameNode being formatted
+   * @throws IOException if the format fails
+   */
+  public static void formatNameNode(Configuration conf) throws IOException {
+    String clusterId = StartupOption.FORMAT.getClusterId();
+    if(clusterId == null || clusterId.isEmpty())
+      StartupOption.FORMAT.setClusterId("testClusterID");
+
+    NameNode.format(conf);
+  }
+  
   /** class MyFile contains enough information to recreate the contents of
    * a single file.
    */

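The new DFSTestUtil.formatNameNode wraps NameNode.format so that the FORMAT startup option always carries a cluster ID before the format runs. A sketch of a caller, assuming only that the configuration points name-node storage at an empty, writable directory (the key and directory value here are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSTestUtil;
    import org.apache.hadoop.hdfs.HdfsConfiguration;

    public class FormatSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new HdfsConfiguration();
        // Illustrative storage location; any empty, writable directory works.
        conf.set("dfs.namenode.name.dir", "/tmp/test-nn-dir");
        // Seeds "testClusterID" when no cluster id was set, then formats.
        DFSTestUtil.formatNameNode(conf);
      }
    }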
Modified: hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/MiniDFSCluster.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/MiniDFSCluster.java?rev=1124466&r1=1124465&r2=1124466&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/MiniDFSCluster.java (original)
+++ hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/MiniDFSCluster.java Wed May 18 23:44:23 2011
@@ -61,6 +61,7 @@ import org.apache.hadoop.hdfs.server.pro
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.net.DNSToSwitchMapping;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.StaticMapping;
@@ -69,6 +70,7 @@ import org.apache.hadoop.security.UserGr
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.tools.GetUserMappingsProtocol;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -82,6 +84,8 @@ public class MiniDFSCluster {
   private static final String NAMESERVICE_ID_PREFIX = "nameserviceId";
   private static final Log LOG = LogFactory.getLog(MiniDFSCluster.class);
 
+  static { DefaultMetricsSystem.setMiniClusterMode(true); }
+
   /**
    * Class to construct instances of MiniDFSClusters with specific options.
    */
@@ -485,6 +489,7 @@ public class MiniDFSCluster {
         setRpcEngine(conf, DatanodeProtocol.class, rpcEngine);
         setRpcEngine(conf, RefreshAuthorizationPolicyProtocol.class, rpcEngine);
         setRpcEngine(conf, RefreshUserMappingsProtocol.class, rpcEngine);
+        setRpcEngine(conf, GetUserMappingsProtocol.class, rpcEngine);
       } catch (ClassNotFoundException e) {
         throw new RuntimeException(e);
       }
@@ -593,7 +598,7 @@ public class MiniDFSCluster {
     
     // Format and clean out DataNode directories
     if (format) {
-      GenericTestUtils.formatNamenode(conf);
+      DFSTestUtil.formatNameNode(conf);
     }
     if (operation == StartupOption.UPGRADE){
       operation.setClusterId(clusterId);

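Registering two NameNodes or DataNodes in one JVM would normally collide on metrics source names; the static block's DefaultMetricsSystem.setMiniClusterMode(true) relaxes that for tests, which is why it runs before any cluster is built. Typical use of the cluster through its Builder, sketched under default single-NameNode options:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.HdfsConfiguration;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class MiniClusterSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new HdfsConfiguration();
        MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
            .numDataNodes(1)
            .build();                      // formats via DFSTestUtil.formatNameNode
        try {
          FileSystem fs = cluster.getFileSystem();
          fs.mkdirs(new Path("/test"));    // exercise the running cluster
        } finally {
          cluster.shutdown();
        }
      }
    }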
Modified: hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java?rev=1124466&r1=1124465&r2=1124466&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java (original)
+++ hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java Wed May 18 23:44:23 2011
@@ -1028,17 +1028,43 @@ public class TestDFSShell extends TestCa
 
       // Verify touch/test
       {
-        String[] args = new String[2];
+        String[] args;
+        int val;
+
+        args = new String[3];
+        args[0] = "-test";
+        args[1] = "-e";
+        args[2] = "/test/mkdirs/noFileHere";
+        val = -1;
+        try {
+          val = shell.run(args);
+        } catch (Exception e) {
+          System.err.println("Exception raised from DFSShell.run " +
+                             e.getLocalizedMessage());
+        }
+        assertEquals(1, val);
+
+        args[1] = "-z";
+        val = -1;
+        try {
+          val = shell.run(args);
+        } catch (Exception e) {
+          System.err.println("Exception raised from DFSShell.run " +
+                             e.getLocalizedMessage());
+        }
+        assertEquals(1, val);
+
+        args = new String[2];
         args[0] = "-touchz";
         args[1] = "/test/mkdirs/noFileHere";
-        int val = -1;
+        val = -1;
         try {
           val = shell.run(args);
         } catch (Exception e) {
           System.err.println("Exception raised from DFSShell.run " +
                              e.getLocalizedMessage());
         }
-        assertTrue(val == 0);
+        assertEquals(0, val);
 
         args = new String[3];
         args[0] = "-test";
@@ -1051,7 +1077,27 @@ public class TestDFSShell extends TestCa
           System.err.println("Exception raised from DFSShell.run " +
                              e.getLocalizedMessage());
         }
-        assertTrue(val == 0);
+        assertEquals(0, val);
+
+        args[1] = "-d";
+        val = -1;
+        try {
+          val = shell.run(args);
+        } catch (Exception e) {
+          System.err.println("Exception raised from DFSShell.run " +
+                             e.getLocalizedMessage());
+        }
+        assertEquals(1, val);
+
+        args[1] = "-z";
+        val = -1;
+        try {
+          val = shell.run(args);
+        } catch (Exception e) {
+          System.err.println("Exception raised from DFSShell.run " +
+                             e.getLocalizedMessage());
+        }
+        assertEquals(0, val);
       }
 
       // Verify that cp from a directory to a subdirectory fails

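The new assertions pin down the exit codes of -test: 0 when the predicate holds (-e exists, -z zero length, -d directory), 1 otherwise, while -touchz returns 0 after creating an empty file. Condensed into a sketch against a throwaway cluster (the path is the one from the test; the wrapper class is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FsShell;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.HdfsConfiguration;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class TestFlagSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new HdfsConfiguration();
        MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
        try {
          cluster.getFileSystem().mkdirs(new Path("/test/mkdirs"));
          FsShell shell = new FsShell(cluster.getFileSystem().getConf());
          String p = "/test/mkdirs/noFileHere";
          System.out.println(shell.run(new String[] {"-test", "-e", p})); // 1: absent
          System.out.println(shell.run(new String[] {"-touchz", p}));     // 0: created empty
          System.out.println(shell.run(new String[] {"-test", "-z", p})); // 0: zero length
          System.out.println(shell.run(new String[] {"-test", "-d", p})); // 1: not a directory
        } finally {
          cluster.shutdown();
        }
      }
    }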
Modified: hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUpgrade.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUpgrade.java?rev=1124466&r1=1124465&r2=1124466&view=diff
==============================================================================
--- hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUpgrade.java (original)
+++ hadoop/hdfs/branches/HDFS-1073/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSUpgrade.java Wed May 18 23:44:23 2011
@@ -93,10 +93,22 @@ public class TestDFSUpgrade {
       assertEquals(UpgradeUtilities.checksumContents(DATA_NODE, current),
         UpgradeUtilities.checksumMasterDataNodeContents());
       
+      // block files are placed under <sd>/current/<bpid>/current/finalized
+      File currentFinalized = 
+        MiniDFSCluster.getFinalizedDir(new File(baseDirs[i]), bpid);
+      assertEquals(UpgradeUtilities.checksumContents(DATA_NODE, currentFinalized),
+          UpgradeUtilities.checksumMasterBlockPoolFinalizedContents());
+      
       File previous = new File(baseDirs[i], "current/" + bpid + "/previous");
       assertTrue(previous.isDirectory());
       assertEquals(UpgradeUtilities.checksumContents(DATA_NODE, previous),
           UpgradeUtilities.checksumMasterDataNodeContents());
+      
+      File previousFinalized =
+        new File(baseDirs[i], "current/" + bpid + "/previous/finalized");
+      assertEquals(UpgradeUtilities.checksumContents(DATA_NODE, previousFinalized),
+          UpgradeUtilities.checksumMasterBlockPoolFinalizedContents());
+      
     }
   }
   /**

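After a layout upgrade the data-node keeps both trees side by side: active block files under <storage>/current/<bpid>/current/finalized and the pre-upgrade copy under <storage>/current/<bpid>/previous/finalized, which is what the extra checksum assertions above walk. A sketch of just the path arithmetic (the storage directory and block-pool id values are illustrative):

    import java.io.File;

    public class BlockPoolLayoutSketch {
      public static void main(String[] args) {
        File storageDir = new File("/data/dn1");            // illustrative
        String bpid = "BP-1234-127.0.0.1-1305761063000";    // illustrative
        File currentFinalized =
            new File(storageDir, "current/" + bpid + "/current/finalized");
        File previousFinalized =
            new File(storageDir, "current/" + bpid + "/previous/finalized");
        System.out.println(currentFinalized);
        System.out.println(previousFinalized);
      }
    }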

