hadoop-hdfs-commits mailing list archives

From: t...@apache.org
Subject: svn commit: r1310174 [1/3] - in /hadoop/common/branches/HDFS-3042/hadoop-hdfs-project: dev-support/ hadoop-hdfs-httpfs/ hadoop-hdfs/ hadoop-hdfs/src/main/java/ hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/ hadoop-hdfs/src/main/java/org/apache/hadoo...
Date: Fri, 06 Apr 2012 05:07:38 GMT
Author: todd
Date: Fri Apr  6 05:07:33 2012
New Revision: 1310174

URL: http://svn.apache.org/viewvc?rev=1310174&view=rev
Log:
Merge trunk into auto-failover branch.

Addressed two semantic conflicts after the commit of HADOOP-8077 -- test code was referencing the old constant for the fencing methods config key.
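For context, the resolution points the test code at the HDFS-side constant added below in DFSConfigKeys (DFS_HA_FENCE_METHODS_KEY). A minimal sketch of the kind of change involved; the pre-merge constant name shown is illustrative, not the exact identifier the tests used:

    // Before the merge (illustrative): tests referenced a constant that
    // HADOOP-8077 removed/relocated, e.g.
    //   conf.set(NodeFencer.CONF_METHODS_KEY, "shell(/bin/true)");
    // After the merge they use the HDFS-side key:
    conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, "shell(/bin/true)");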

Added:
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/journalservice/
      - copied from r1310173, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/journalservice/
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/journalservice/JournalListener.java
      - copied unchanged from r1310173, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/journalservice/JournalListener.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/journalservice/JournalService.java
      - copied unchanged from r1310173, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/journalservice/JournalService.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsBinaryLoader.java
      - copied unchanged from r1310173, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsBinaryLoader.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsLoader.java
      - copied unchanged from r1310173, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsLoader.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitor.java
      - copied unchanged from r1310173, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitor.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
      - copied unchanged from r1310173, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
      - copied unchanged from r1310173, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/TeeOutputStream.java
      - copied unchanged from r1310173, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/TeeOutputStream.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/XMLUtils.java
      - copied unchanged from r1310173, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/XMLUtils.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/journalservice/
      - copied from r1310173, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/journalservice/
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/journalservice/TestJournalService.java
      - copied unchanged from r1310173, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/journalservice/TestJournalService.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestInitializeSharedEdits.java
      - copied unchanged from r1310173, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestInitializeSharedEdits.java
Removed:
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalService.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/BinaryTokenizer.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/EditsElement.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/EditsLoader.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/EditsLoaderCurrent.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/EditsVisitor.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/EditsVisitorFactory.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/TextEditsVisitor.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/Tokenizer.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/TokenizerFactory.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/XmlTokenizer.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestJournalService.java
Modified:
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/dev-support/test-patch.properties
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/   (props changed)
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolServerSideTranslatorPB.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolTranslatorPB.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupImage.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileOutputStream.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeRegistration.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/NNHAServiceTarget.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/BinaryEditsVisitor.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsViewer.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/XmlEditsVisitor.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/native/   (props changed)
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/JournalProtocol.proto
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/   (props changed)
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/   (props changed)
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/   (props changed)
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/hdfs/   (props changed)
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDecommission.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReplaceDatanodeOnFailure.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestBootstrapStandby.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDFSZKFailoverController.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdmin.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/TestOfflineEditsViewer.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
    hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/editsStored.xml

Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/dev-support/test-patch.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/dev-support/test-patch.properties?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/dev-support/test-patch.properties (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/dev-support/test-patch.properties Fri Apr  6 05:07:33 2012
@@ -18,4 +18,4 @@
 
 OK_RELEASEAUDIT_WARNINGS=0
 OK_FINDBUGS_WARNINGS=0
-OK_JAVADOC_WARNINGS=0
+OK_JAVADOC_WARNINGS=8

Propchange: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs:r1309568-1310173

Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml Fri Apr  6 05:07:33 2012
@@ -146,10 +146,6 @@
           <artifactId>jets3t</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>hsqldb</groupId>
-          <artifactId>hsqldb</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>org.eclipse.jdt</groupId>
           <artifactId>core</artifactId>
         </exclusion>
@@ -213,10 +209,6 @@
           <artifactId>jets3t</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>hsqldb</groupId>
-          <artifactId>hsqldb</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>org.eclipse.jdt</groupId>
           <artifactId>core</artifactId>
         </exclusion>

Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Fri Apr  6 05:07:33 2012
@@ -59,6 +59,12 @@ Trunk (unreleased changes)
 
     HDFS-3131. Improve TestStorageRestore. (Brandon Li via atm)
 
+    HDFS-3178. Add states and state handler for journal synchronization in
+    JournalService.  (szetszwo)
+
+    HDFS-3204. Minor modification to JournalProtocol.proto to make
+    it generic. (suresh)
+
   OPTIMIZATIONS
 
     HDFS-2834. Add a ByteBuffer-based read API to DFSInputStream.
@@ -184,6 +190,8 @@ Release 2.0.0 - UNRELEASED 
 
     HDFS-3000. Add a public API for setting quotas. (atm)
 
+    HDFS-3102. Add CLI tool to initialize the shared-edits dir. (atm)
+
   IMPROVEMENTS
 
     HDFS-2018. Move all journal stream management code into one place.
@@ -316,6 +324,9 @@ Release 2.0.0 - UNRELEASED 
     HDFS-3084. FenceMethod.tryFence() and ShellCommandFencer should pass
     namenodeId as well as host:port (todd)
 
+    HDFS-3050. Rework OEV to share more code with the NameNode.
+    (Colin Patrick McCabe via eli)
+
   OPTIMIZATIONS
 
     HDFS-3024. Improve performance of stringification in addStoredBlock (todd)
@@ -412,6 +423,19 @@ Release 2.0.0 - UNRELEASED 
 
     HDFS-3174. Fix assert in TestPendingDataNodeMessages. (eli)
 
+    HDFS-3199. TestValidateConfigurationSettings is failing. (todd via eli)
+
+    HDFS-3202. NamespaceInfo PB translation drops build version. (atm)
+
+    HDFS-3109. Remove hsqldb exclusions from pom.xml. (Ravi Prakash
+    via suresh)
+
+    HDFS-3210. JsonUtil#toJsonMap for a DatanodeInfo should use
+    "ipAddr" instead of "name". (eli)
+
+    HDFS-3208. Bogus entries in hosts files are incorrectly displayed
+    in the report. (eli)
+
   BREAKDOWN OF HDFS-1623 SUBTASKS
 
     HDFS-2179. Add fencing framework and mechanisms for NameNode HA. (todd)

Propchange: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java:r1309568-1310173

Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java Fri Apr  6 05:07:33 2012
@@ -211,7 +211,8 @@ public class DFSConfigKeys extends Commo
   public static final String  DFS_BALANCER_MOVEDWINWIDTH_KEY = "dfs.balancer.movedWinWidth";
   public static final long    DFS_BALANCER_MOVEDWINWIDTH_DEFAULT = 5400*1000L;
   public static final String  DFS_DATANODE_ADDRESS_KEY = "dfs.datanode.address";
-  public static final String  DFS_DATANODE_ADDRESS_DEFAULT = "0.0.0.0:50010";
+  public static final int     DFS_DATANODE_DEFAULT_PORT = 50010;
+  public static final String  DFS_DATANODE_ADDRESS_DEFAULT = "0.0.0.0:" + DFS_DATANODE_DEFAULT_PORT;
   public static final String  DFS_DATANODE_DATA_DIR_PERMISSION_KEY = "dfs.datanode.data.dir.perm";
   public static final String  DFS_DATANODE_DATA_DIR_PERMISSION_DEFAULT = "700";
   public static final String  DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY = "dfs.datanode.directoryscan.interval";
@@ -227,7 +228,8 @@ public class DFSConfigKeys extends Commo
   public static final String  DFS_DATANODE_HANDLER_COUNT_KEY = "dfs.datanode.handler.count";
   public static final int     DFS_DATANODE_HANDLER_COUNT_DEFAULT = 3;
   public static final String  DFS_DATANODE_HTTP_ADDRESS_KEY = "dfs.datanode.http.address";
-  public static final String  DFS_DATANODE_HTTP_ADDRESS_DEFAULT = "0.0.0.0:50075";
+  public static final int     DFS_DATANODE_HTTP_DEFAULT_PORT = 50075;
+  public static final String  DFS_DATANODE_HTTP_ADDRESS_DEFAULT = "0.0.0.0:" + DFS_DATANODE_HTTP_DEFAULT_PORT;
   public static final String  DFS_DATANODE_MAX_RECEIVER_THREADS_KEY = "dfs.datanode.max.transfer.threads";
   public static final int     DFS_DATANODE_MAX_RECEIVER_THREADS_DEFAULT = 4096;
   public static final String  DFS_DATANODE_NUMBLOCKS_KEY = "dfs.datanode.numblocks";
@@ -251,13 +253,15 @@ public class DFSConfigKeys extends Commo
   public static final String  DFS_HTTPS_ENABLE_KEY = "dfs.https.enable";
   public static final boolean DFS_HTTPS_ENABLE_DEFAULT = false;
   public static final String  DFS_HTTPS_PORT_KEY = "dfs.https.port";
-  public static final int     DFS_HTTPS_PORT_DEFAULT = 50470;
   public static final String  DFS_DEFAULT_CHUNK_VIEW_SIZE_KEY = "dfs.default.chunk.view.size";
   public static final int     DFS_DEFAULT_CHUNK_VIEW_SIZE_DEFAULT = 32*1024;
   public static final String  DFS_DATANODE_HTTPS_ADDRESS_KEY = "dfs.datanode.https.address";
-  public static final String  DFS_DATANODE_HTTPS_ADDRESS_DEFAULT = "0.0.0.0:50475";
+  public static final String  DFS_DATANODE_HTTPS_PORT_KEY = "datanode.https.port";
+  public static final int     DFS_DATANODE_HTTPS_DEFAULT_PORT = 50475;
+  public static final String  DFS_DATANODE_HTTPS_ADDRESS_DEFAULT = "0.0.0.0:" + DFS_DATANODE_HTTPS_DEFAULT_PORT;
   public static final String  DFS_DATANODE_IPC_ADDRESS_KEY = "dfs.datanode.ipc.address";
-  public static final String  DFS_DATANODE_IPC_ADDRESS_DEFAULT = "0.0.0.0:50020";
+  public static final int     DFS_DATANODE_IPC_DEFAULT_PORT = 50020;
+  public static final String  DFS_DATANODE_IPC_ADDRESS_DEFAULT = "0.0.0.0:" + DFS_DATANODE_IPC_DEFAULT_PORT;
 
   public static final String  DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY = "dfs.block.access.token.enable";
   public static final boolean DFS_BLOCK_ACCESS_TOKEN_ENABLE_DEFAULT = false;
@@ -341,4 +345,5 @@ public class DFSConfigKeys extends Commo
   public static final int DFS_HA_LOGROLL_PERIOD_DEFAULT = 2 * 60; // 2m
   public static final String DFS_HA_TAILEDITS_PERIOD_KEY = "dfs.ha.tail-edits.period";
   public static final int DFS_HA_TAILEDITS_PERIOD_DEFAULT = 60; // 1m
+  public static final String DFS_HA_FENCE_METHODS_KEY = "dfs.ha.fencing.methods";
 }

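The refactoring above extracts the numeric default ports into standalone constants and derives the string address defaults from them, so code such as the hosts-file parser added to DatanodeManager below can reference a default port without parsing a string. A short sketch of the assumed usage:

    // Sketch (assumed usage, not part of this commit):
    int port = DFSConfigKeys.DFS_DATANODE_DEFAULT_PORT;        // 50010
    String addr = DFSConfigKeys.DFS_DATANODE_ADDRESS_DEFAULT;  // "0.0.0.0:50010"
    InetSocketAddress isa = NetUtils.createSocketAddr(addr);
    assert isa.getPort() == port;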
Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java Fri Apr  6 05:07:33 2012
@@ -24,6 +24,7 @@ import java.io.IOException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
 
@@ -50,14 +51,21 @@ public class DatanodeID implements Writa
   protected int infoPort;      // info server port
   protected int ipcPort;       // IPC server port
 
-  /** Equivalent to DatanodeID(""). */
-  public DatanodeID() {this("");}
+  public DatanodeID() {
+    this("", DFSConfigKeys.DFS_DATANODE_DEFAULT_PORT);
+  }
 
-  /** Equivalent to DatanodeID(ipAddr, "", -1, -1, -1). */
-  public DatanodeID(String ipAddr) {this(ipAddr, "", "", -1, -1, -1);}
+  public DatanodeID(String ipAddr, int xferPort) {
+    this(ipAddr, "", "", xferPort,
+        DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,
+        DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT);
+  }
 
-  /** Equivalent to DatanodeID(ipAddr, "", xferPort, -1, -1). */
-  public DatanodeID(String ipAddr, int xferPort) {this(ipAddr, "", "", xferPort, -1, -1);}
+  public DatanodeID(String ipAddr, String hostName, int xferPort) {
+    this(ipAddr, hostName, "", xferPort,
+        DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT,
+        DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT);
+  }
 
   /**
    * DatanodeID copy constructor

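With the new constructors, a DatanodeID built from partial information carries well-defined default ports instead of the old -1 sentinels. An illustrative sketch, assuming the usual getters:

    DatanodeID id = new DatanodeID("10.0.0.1", "host1", 50010);
    // Assumed results under the new defaults:
    //   id.getInfoPort() == DFSConfigKeys.DFS_DATANODE_HTTP_DEFAULT_PORT  (50075)
    //   id.getIpcPort()  == DFSConfigKeys.DFS_DATANODE_IPC_DEFAULT_PORT   (50020)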
Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java Fri Apr  6 05:07:33 2012
@@ -122,12 +122,12 @@ public class DatanodeInfo extends Datano
   }
 
   /** Constructor */
-  public DatanodeInfo(final String name, final String hostName,
+  public DatanodeInfo(final String ipAddr, final String hostName,
       final String storageID, final int xferPort, final int infoPort, final int ipcPort,
       final long capacity, final long dfsUsed, final long remaining,
       final long blockPoolUsed, final long lastUpdate, final int xceiverCount,
       final String networkLocation, final AdminStates adminState) {
-    super(name, hostName, storageID, xferPort, infoPort, ipcPort);
+    super(ipAddr, hostName, storageID, xferPort, infoPort, ipcPort);
     this.capacity = capacity;
     this.dfsUsed = dfsUsed;
     this.remaining = remaining;
@@ -265,7 +265,7 @@ public class DatanodeInfo extends Datano
     long c = getCapacity();
     long r = getRemaining();
     long u = getDfsUsed();
-    buffer.append(ipAddr);
+    buffer.append(getName());
     if (!NetworkTopology.DEFAULT_RACK.equals(location)) {
       buffer.append(" "+location);
     }

Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolServerSideTranslatorPB.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolServerSideTranslatorPB.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolServerSideTranslatorPB.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolServerSideTranslatorPB.java Fri Apr  6 05:07:33 2012
@@ -48,7 +48,7 @@ public class JournalProtocolServerSideTr
   public JournalResponseProto journal(RpcController unused,
       JournalRequestProto req) throws ServiceException {
     try {
-      impl.journal(PBHelper.convert(req.getRegistration()),
+      impl.journal(PBHelper.convert(req.getJournalInfo()),
           req.getFirstTxnId(), req.getNumTxns(), req.getRecords()
               .toByteArray());
     } catch (IOException e) {
@@ -62,7 +62,7 @@ public class JournalProtocolServerSideTr
   public StartLogSegmentResponseProto startLogSegment(RpcController controller,
       StartLogSegmentRequestProto req) throws ServiceException {
     try {
-      impl.startLogSegment(PBHelper.convert(req.getRegistration()),
+      impl.startLogSegment(PBHelper.convert(req.getJournalInfo()),
           req.getTxid());
     } catch (IOException e) {
       throw new ServiceException(e);

Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolTranslatorPB.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolTranslatorPB.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolTranslatorPB.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolTranslatorPB.java Fri Apr  6 05:07:33 2012
@@ -24,12 +24,10 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.proto.JournalProtocolProtos.JournalRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.JournalProtocolProtos.StartLogSegmentRequestProto;
-import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
 import org.apache.hadoop.hdfs.server.protocol.JournalProtocol;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration;
 import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
-import org.apache.hadoop.ipc.ProtocolSignature;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
@@ -63,7 +61,7 @@ public class JournalProtocolTranslatorPB
   public void journal(NamenodeRegistration reg, long firstTxnId,
       int numTxns, byte[] records) throws IOException {
     JournalRequestProto req = JournalRequestProto.newBuilder()
-        .setRegistration(PBHelper.convert(reg))
+        .setJournalInfo(PBHelper.convertToJournalInfo(reg))
         .setFirstTxnId(firstTxnId)
         .setNumTxns(numTxns)
         .setRecords(PBHelper.getByteString(records))
@@ -79,7 +77,7 @@ public class JournalProtocolTranslatorPB
   public void startLogSegment(NamenodeRegistration registration, long txid)
       throws IOException {
     StartLogSegmentRequestProto req = StartLogSegmentRequestProto.newBuilder()
-        .setRegistration(PBHelper.convert(registration))
+        .setJournalInfo(PBHelper.convertToJournalInfo(registration))
         .setTxid(txid)
         .build();
     try {

Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java Fri Apr  6 05:07:33 2012
@@ -95,6 +95,7 @@ import org.apache.hadoop.hdfs.protocol.p
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ReplicaStateProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageInfoProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.UpgradeStatusReportProto;
+import org.apache.hadoop.hdfs.protocol.proto.JournalProtocolProtos.JournalInfoProto;
 import org.apache.hadoop.hdfs.security.token.block.BlockKey;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
@@ -127,7 +128,6 @@ import org.apache.hadoop.hdfs.server.pro
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
 import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations;
-import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand;
 import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.io.Text;
@@ -384,7 +384,8 @@ public class PBHelper {
   public static NamespaceInfo convert(NamespaceInfoProto info) {
     StorageInfoProto storage = info.getStorageInfo();
     return new NamespaceInfo(storage.getNamespceID(), storage.getClusterID(),
-        info.getBlockPoolID(), storage.getCTime(), info.getDistUpgradeVersion());
+        info.getBlockPoolID(), storage.getCTime(), info.getDistUpgradeVersion(),
+        info.getBuildVersion());
   }
 
   public static NamenodeCommand convert(NamenodeCommandProto cmd) {
@@ -1345,4 +1346,26 @@ public class PBHelper {
         .setDfsUsed(r.getDfsUsed()).setRemaining(r.getRemaining())
         .setStorageID(r.getStorageID()).build();
   }
+
+  public static NamenodeRegistration convert(JournalInfoProto info) {
+    int lv = info.hasLayoutVersion() ? info.getLayoutVersion() : 0;
+    int nsID = info.hasNamespaceID() ? info.getNamespaceID() : 0;
+    StorageInfo storage = new StorageInfo(lv, nsID, info.getClusterID(), 0);
+    
+    // Note that the role is always {@link NamenodeRole#NAMENODE} as this
+    // conversion happens for messages from Namenode to Journal receivers.
+    // Addresses in the registration are unused.
+    return new NamenodeRegistration("", "", storage, NamenodeRole.NAMENODE);
+  }
+
+  /**
+   * Converts a {@link NamenodeRegistration} into the {@link JournalInfoProto}
+   * sent from the Namenode to Journal receivers.
+   */
+  public static JournalInfoProto convertToJournalInfo(
+      NamenodeRegistration reg) {
+    return JournalInfoProto.newBuilder().setClusterID(reg.getClusterID())
+        .setLayoutVersion(reg.getLayoutVersion())
+        .setNamespaceID(reg.getNamespaceID()).build();
+  }
 }

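As the comment in convert(JournalInfoProto) notes, the registration/JournalInfo round trip is deliberately lossy: only clusterID, layoutVersion and namespaceID cross the wire. A hedged sketch, where reg is an existing NamenodeRegistration:

    JournalInfoProto proto = PBHelper.convertToJournalInfo(reg);
    NamenodeRegistration back = PBHelper.convert(proto);
    // back keeps reg's clusterID, layoutVersion and namespaceID, but its
    // addresses are "" and its role is always NamenodeRole.NAMENODE.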
Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java Fri Apr  6 05:07:33 2012
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
-import java.io.DataInput;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.LinkedList;
@@ -26,13 +24,10 @@ import java.util.List;
 import java.util.Queue;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hdfs.DeprecatedUTF8;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.util.LightWeightHashSet;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableUtils;
 
 /**
  * This class extends the DatanodeInfo class with ephemeral information (eg
@@ -158,18 +153,18 @@ public class DatanodeDescriptor extends 
    */
   private boolean disallowed = false;
 
-  /** Default constructor */
   public DatanodeDescriptor() {}
   
-  /** DatanodeDescriptor constructor
+  /**
+   * DatanodeDescriptor constructor
    * @param nodeID id of the data node
    */
   public DatanodeDescriptor(DatanodeID nodeID) {
     this(nodeID, 0L, 0L, 0L, 0L, 0, 0);
   }
 
-  /** DatanodeDescriptor constructor
-   * 
+  /**
+   * DatanodeDescriptor constructor
    * @param nodeID id of the data node
    * @param networkLocation location of the data node in network
    */
@@ -178,8 +173,8 @@ public class DatanodeDescriptor extends 
     this(nodeID, networkLocation, 0L, 0L, 0L, 0L, 0, 0);
   }
   
-  /** DatanodeDescriptor constructor
-   * 
+  /**
+   * DatanodeDescriptor constructor
    * @param nodeID id of the data node
    * @param capacity capacity of the data node
    * @param dfsUsed space used by the data node
@@ -199,8 +194,8 @@ public class DatanodeDescriptor extends 
         failedVolumes);
   }
 
-  /** DatanodeDescriptor constructor
-   * 
+  /**
+   * DatanodeDescriptor constructor
    * @param nodeID id of the data node
    * @param networkLocation location of the data node in network
    * @param capacity capacity of the data node, including space used by non-dfs

Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java Fri Apr  6 05:07:33 2012
@@ -71,6 +71,8 @@ import org.apache.hadoop.util.Daemon;
 import org.apache.hadoop.util.HostsFileReader;
 import org.apache.hadoop.util.ReflectionUtils;
 
+import com.google.common.net.InetAddresses;
+
 /**
  * Manage datanodes, include decommission and other activities.
  */
@@ -353,14 +355,9 @@ public class DatanodeManager {
   private void resolveNetworkLocation (DatanodeDescriptor node) {
     List<String> names = new ArrayList<String>(1);
     if (dnsToSwitchMapping instanceof CachedDNSToSwitchMapping) {
-      // get the node's IP address
       names.add(node.getIpAddr());
     } else {
-      // get the node's host name
-      String hostName = node.getHostName();
-      int colon = hostName.indexOf(":");
-      hostName = (colon==-1)?hostName:hostName.substring(0,colon);
-      names.add(hostName);
+      names.add(node.getHostName());
     }
     
     // resolve its network location
@@ -771,6 +768,40 @@ public class DatanodeManager {
     }
   }
 
+  /**
+   * Parse a DatanodeID from a hosts file entry
+   * @param hostLine hosts file line of the form [hostname|ip][:port]?
+   * @return DatanodeID constructed from the given string
+   */
+  private DatanodeID parseDNFromHostsEntry(String hostLine) {
+    DatanodeID dnId;
+    String hostStr;
+    int port;
+    int idx = hostLine.indexOf(':');
+
+    if (-1 == idx) {
+      hostStr = hostLine;
+      port = DFSConfigKeys.DFS_DATANODE_DEFAULT_PORT;
+    } else {
+      hostStr = hostLine.substring(0, idx);
+      port = Integer.valueOf(hostLine.substring(idx + 1)); // skip the ':'
+    }
+
+    if (InetAddresses.isInetAddress(hostStr)) {
+      // The IP:port is sufficient for listing in a report
+      dnId = new DatanodeID(hostStr, "", port);
+    } else {
+      String ipAddr = "";
+      try {
+        ipAddr = InetAddress.getByName(hostStr).getHostAddress();
+      } catch (UnknownHostException e) {
+        LOG.warn("Invalid hostname " + hostStr + " in hosts file");
+      }
+      dnId = new DatanodeID(ipAddr, hostStr, port);
+    }
+    return dnId;
+  }
+
   /** For generating datanode reports */
   public List<DatanodeDescriptor> getDatanodeListForReport(
       final DatanodeReportType type) {
@@ -782,7 +813,7 @@ public class DatanodeManager {
     HashMap<String, String> mustList = new HashMap<String, String>();
 
     if (listDeadNodes) {
-      //first load all the nodes listed in include and exclude files.
+      // Put all nodes referenced in the hosts files in the map
       Iterator<String> it = hostsReader.getHosts().iterator();
       while (it.hasNext()) {
         mustList.put(it.next(), "");
@@ -805,7 +836,7 @@ public class DatanodeManager {
         if ( (isDead && listDeadNodes) || (!isDead && listLiveNodes) ) {
           nodes.add(dn);
         }
-        //Remove any form of the this datanode in include/exclude lists.
+        // Remove any nodes we know about from the map
         try {
           InetAddress inet = InetAddress.getByName(dn.getIpAddr());
           // compare hostname(:port)
@@ -814,7 +845,7 @@ public class DatanodeManager {
           // compare ipaddress(:port)
           mustList.remove(inet.getHostAddress().toString());
           mustList.remove(inet.getHostAddress().toString()+ ":" +dn.getXferPort());
-        } catch ( UnknownHostException e ) {
+        } catch (UnknownHostException e) {
           mustList.remove(dn.getName());
           mustList.remove(dn.getIpAddr());
           LOG.warn(e);
@@ -825,9 +856,13 @@ public class DatanodeManager {
     if (listDeadNodes) {
       Iterator<String> it = mustList.keySet().iterator();
       while (it.hasNext()) {
-        DatanodeDescriptor dn = 
-            new DatanodeDescriptor(new DatanodeID(it.next()));
-        dn.setLastUpdate(0);
+        // The remaining nodes are ones that are referenced by the hosts
+        // files but that we do not know about, i.e. that we have never
+        // heard from, e.g. a host that is no longer part of the cluster
+        // or a bogus entry in the hosts files
+        DatanodeID dnId = parseDNFromHostsEntry(it.next());
+        DatanodeDescriptor dn = new DatanodeDescriptor(dnId); 
+        dn.setLastUpdate(0); // Consider this node dead for reporting
         nodes.add(dn);
       }
     }

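For reference, parseDNFromHostsEntry maps hosts-file entries to DatanodeIDs roughly as follows (assumed behavior, given the default transfer port of 50010):

    "10.0.0.1"        -> DatanodeID("10.0.0.1", "", 50010)
    "10.0.0.1:50011"  -> DatanodeID("10.0.0.1", "", 50011)
    "host1.example"   -> DatanodeID(<resolved IP>, "host1.example", 50010)
    "bogus.invalid"   -> DatanodeID("", "bogus.invalid", 50010), after
                         logging a warning for the failed resolution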
Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java Fri Apr  6 05:07:33 2012
@@ -53,7 +53,8 @@ public final class HdfsServerConstants {
     ROLLBACK("-rollback"),
     FINALIZE("-finalize"),
     IMPORT  ("-importCheckpoint"),
-    BOOTSTRAPSTANDBY("-bootstrapStandby");
+    BOOTSTRAPSTANDBY("-bootstrapStandby"),
+    INITIALIZESHAREDEDITS("-initializeSharedEdits");
     
     private String name = null;
     

Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java Fri Apr  6 05:07:33 2012
@@ -668,8 +668,7 @@ public class DataNode extends Configured
    */
   DatanodeRegistration createBPRegistration(NamespaceInfo nsInfo) {
     final String xferIp = streamingAddr.getAddress().getHostAddress();
-    DatanodeRegistration bpRegistration = new DatanodeRegistration(xferIp);
-    bpRegistration.setXferPort(getXferPort());
+    DatanodeRegistration bpRegistration = new DatanodeRegistration(xferIp, getXferPort());
     bpRegistration.setInfoPort(getInfoPort());
     bpRegistration.setIpcPort(getIpcPort());
     bpRegistration.setHostName(hostName);

Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupImage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupImage.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupImage.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupImage.java Fri Apr  6 05:07:33 2012
@@ -331,8 +331,7 @@ public class BackupImage extends FSImage
    */
   private synchronized void setState(BNState newState) {
     if (LOG.isDebugEnabled()) {
-      LOG.debug("State transition " + bnState + " -> " + newState,
-          new Exception("trace"));
+      LOG.debug("State transition " + bnState + " -> " + newState);
     }
     bnState = newState;
   }

Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java Fri Apr  6 05:07:33 2012
@@ -27,17 +27,15 @@ import org.apache.hadoop.fs.CommonConfig
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.NameNodeProxies;
-import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
+import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.protocol.proto.JournalProtocolProtos.JournalProtocolService;
 import org.apache.hadoop.hdfs.protocolPB.JournalProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.JournalProtocolServerSideTranslatorPB;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
 import org.apache.hadoop.hdfs.server.common.Storage;
-import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations;
 import org.apache.hadoop.hdfs.server.protocol.JournalProtocol;
-import org.apache.hadoop.hdfs.server.protocol.NamenodeCommand;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration;
 import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
@@ -225,8 +223,6 @@ public class BackupNode extends NameNode
   
   static class BackupNodeRpcServer extends NameNodeRpcServer implements
       JournalProtocol {
-    private final String nnRpcAddress;
-    
     private BackupNodeRpcServer(Configuration conf, BackupNode nn)
         throws IOException {
       super(conf, nn);
@@ -236,8 +232,31 @@ public class BackupNode extends NameNode
           .newReflectiveBlockingService(journalProtocolTranslator);
       DFSUtil.addPBProtocol(conf, JournalProtocolPB.class, service,
           this.clientRpcServer);
-      nnRpcAddress = nn.nnRpcAddress;
     }
+    
+    /** 
+     * Verifies a journal request
+     * @param reg node registration
+     * @throws UnregisteredNodeException if the registration is invalid
+     */
+    void verifyJournalRequest(NamenodeRegistration reg) throws IOException {
+      verifyVersion(reg.getLayoutVersion());
+      String errorMsg = null;
+      int expectedNamespaceID = namesystem.getNamespaceInfo().getNamespaceID();
+      if (reg.getNamespaceID() != expectedNamespaceID) {
+        errorMsg = "Invalid namespaceID in journal request - expected " + expectedNamespaceID
+            + " actual " + reg.getNamespaceID();
+        LOG.warn(errorMsg);
+        throw new UnregisteredNodeException(reg);
+      } 
+      if (!reg.getClusterID().equals(namesystem.getClusterId())) {
+        errorMsg = "Invalid clusterId in journal request - expected "
+            + reg.getClusterID() + " actual " + namesystem.getClusterId();
+        LOG.warn(errorMsg);
+        throw new UnregisteredNodeException(reg);
+      }
+    }
+
 
     /////////////////////////////////////////////////////
     // BackupNodeProtocol implementation for backup node.
@@ -246,8 +265,7 @@ public class BackupNode extends NameNode
     public void startLogSegment(NamenodeRegistration registration, long txid)
         throws IOException {
       namesystem.checkOperation(OperationCategory.JOURNAL);
-      verifyRequest(registration);
-      
+      verifyJournalRequest(registration);
       getBNImage().namenodeStartedLogSegment(txid);
     }
     
@@ -256,10 +274,7 @@ public class BackupNode extends NameNode
         long firstTxId, int numTxns,
         byte[] records) throws IOException {
       namesystem.checkOperation(OperationCategory.JOURNAL);
-      verifyRequest(nnReg);
-      if(!nnRpcAddress.equals(nnReg.getAddress()))
-        throw new IOException("Journal request from unexpected name-node: "
-            + nnReg.getAddress() + " expecting " + nnRpcAddress);
+      verifyJournalRequest(nnReg);
       getBNImage().journal(firstTxId, numTxns, records);
     }
 

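Net effect of the change above: journal requests are validated by layout version, namespaceID and clusterID instead of by the sender's RPC address, so the BackupNode no longer rejects a legitimate NameNode merely for registering under a different address. A hedged sketch (backupRpcServer and regFromOtherCluster are hypothetical names):

    // Assumed behavior: a registration carrying a foreign namespaceID or
    // clusterID is rejected regardless of the address it came from.
    try {
      backupRpcServer.verifyJournalRequest(regFromOtherCluster);
    } catch (UnregisteredNodeException e) {
      // expected: the IDs do not match this namesystem
    }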
Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java Fri Apr  6 05:07:33 2012
@@ -33,7 +33,7 @@ import com.google.common.annotations.Vis
  * An implementation of the abstract class {@link EditLogInputStream}, which
  * reads edits from a local file.
  */
-class EditLogFileInputStream extends EditLogInputStream {
+public class EditLogFileInputStream extends EditLogInputStream {
   private final File file;
   private final FileInputStream fStream;
   final private long firstTxId;
@@ -67,7 +67,7 @@ class EditLogFileInputStream extends Edi
    * @throws IOException if an actual IO error occurs while reading the
    *         header
    */
-  EditLogFileInputStream(File name, long firstTxId, long lastTxId,
+  public EditLogFileInputStream(File name, long firstTxId, long lastTxId,
       boolean isInProgress)
       throws LogHeaderCorruptException, IOException {
     file = name;

Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileOutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileOutputStream.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileOutputStream.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileOutputStream.java Fri Apr  6 05:07:33 2012
@@ -64,7 +64,7 @@ public class EditLogFileOutputStream ext
    *          Size of flush buffer
    * @throws IOException
    */
-  EditLogFileOutputStream(File name, int size) throws IOException {
+  public EditLogFileOutputStream(File name, int size) throws IOException {
     super();
     file = name;
     doubleBuf = new EditsDoubleBuffer(size);
@@ -174,7 +174,7 @@ public class EditLogFileOutputStream ext
    * accumulates new log records while readyBuffer will be flushed and synced.
    */
   @Override
-  protected void flushAndSync() throws IOException {
+  public void flushAndSync() throws IOException {
     if (fp == null) {
       throw new IOException("Trying to use aborted output stream");
     }

Modified: hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java?rev=1310174&r1=1310173&r2=1310174&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java Fri Apr  6 05:07:33 2012
@@ -21,7 +21,10 @@ import java.util.zip.CheckedInputStream;
 import java.util.zip.Checksum;
 import java.util.Arrays;
 import java.util.EnumMap;
+import java.util.List;
 
+import org.apache.commons.codec.DecoderException;
+import org.apache.commons.codec.binary.Hex;
 import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -39,10 +42,17 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.ArrayWritable;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
+import org.apache.hadoop.hdfs.util.XMLUtils;
+import org.apache.hadoop.hdfs.util.XMLUtils.InvalidXmlException;
+import org.apache.hadoop.hdfs.util.XMLUtils.Stanza;
 import org.apache.hadoop.hdfs.DeprecatedUTF8;
+import org.xml.sax.ContentHandler;
+import org.xml.sax.SAXException;
+import org.xml.sax.helpers.AttributesImpl;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -59,7 +69,7 @@ import java.io.EOFException;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public abstract class FSEditLogOp {
-  final FSEditLogOpCodes opCode;
+  public final FSEditLogOpCodes opCode;
   long txid;
 
 
@@ -121,7 +131,7 @@ public abstract class FSEditLogOp {
   abstract void readFields(DataInputStream in, int logVersion)
       throws IOException;
 
-  abstract void writeFields(DataOutputStream out)
+  public abstract void writeFields(DataOutputStream out)
       throws IOException;
 
   static interface BlockListUpdatingOp {
@@ -201,7 +211,8 @@ public abstract class FSEditLogOp {
       return (T)this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(path, out);
       FSImageSerialization.writeShort(replication, out);
@@ -312,6 +323,49 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "LENGTH",
+          Integer.valueOf(length).toString());
+      XMLUtils.addSaxString(contentHandler, "PATH", path);
+      XMLUtils.addSaxString(contentHandler, "REPLICATION",
+          Short.valueOf(replication).toString());
+      XMLUtils.addSaxString(contentHandler, "MTIME",
+          Long.valueOf(mtime).toString());
+      XMLUtils.addSaxString(contentHandler, "ATIME",
+          Long.valueOf(atime).toString());
+      XMLUtils.addSaxString(contentHandler, "BLOCKSIZE",
+          Long.valueOf(blockSize).toString());
+      XMLUtils.addSaxString(contentHandler, "CLIENT_NAME", clientName);
+      XMLUtils.addSaxString(contentHandler, "CLIENT_MACHINE", clientMachine);
+      for (Block b : blocks) {
+        FSEditLogOp.blockToXml(contentHandler, b);
+      }
+      FSEditLogOp.permissionStatusToXml(contentHandler, permissions);
+    }
+
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.length = Integer.valueOf(st.getValue("LENGTH"));
+      this.path = st.getValue("PATH");
+      this.replication = Short.valueOf(st.getValue("REPLICATION"));
+      this.mtime = Long.valueOf(st.getValue("MTIME"));
+      this.atime = Long.valueOf(st.getValue("ATIME"));
+      this.blockSize = Long.valueOf(st.getValue("BLOCKSIZE"));
+      this.clientName = st.getValue("CLIENT_NAME");
+      this.clientMachine = st.getValue("CLIENT_MACHINE");
+      if (st.hasChildren("BLOCK")) {
+        List<Stanza> blocks = st.getChildren("BLOCK");
+        this.blocks = new Block[blocks.size()];
+        for (int i = 0; i < blocks.size(); i++) {
+          this.blocks[i] = FSEditLogOp.blockFromXml(blocks.get(i));
+        }
+      } else {
+        this.blocks = new Block[0];
+      }
+      this.permissions =
+          permissionStatusFromXml(st.getChildren("PERMISSION_STATUS").get(0));
+    }
   }
 
   static class AddOp extends AddCloseOp {
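
For orientation: combined with outputToXml() near the end of this file, the toXml()/fromXml() pair above gives each opcode a symmetric XML codec. An OP_ADD record would serialize roughly as follows (all values illustrative, element order as in the code):

    <RECORD>
      <OPCODE>OP_ADD</OPCODE>
      <DATA>
        <TXID>7</TXID>
        <LENGTH>0</LENGTH>
        <PATH>/user/todd/f</PATH>
        <REPLICATION>3</REPLICATION>
        <MTIME>1333688850000</MTIME>
        <ATIME>1333688850000</ATIME>
        <BLOCKSIZE>67108864</BLOCKSIZE>
        <CLIENT_NAME>DFSClient_42</CLIENT_NAME>
        <CLIENT_MACHINE>127.0.0.1</CLIENT_MACHINE>
        <PERMISSION_STATUS>
          <USERNAME>todd</USERNAME>
          <GROUPNAME>supergroup</GROUPNAME>
          <MODE>420</MODE>
        </PERMISSION_STATUS>
      </DATA>
    </RECORD>
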
@@ -391,6 +445,7 @@ public abstract class FSEditLogOp {
     }
 
     @Override
+    public
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(path, out);
       FSImageSerialization.writeCompactBlockArray(blocks, out);
@@ -418,6 +473,23 @@ public abstract class FSEditLogOp {
         .append("]");
       return sb.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "PATH", path);
+      for (Block b : blocks) {
+        FSEditLogOp.blockToXml(contentHandler, b);
+      }
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.path = st.getValue("PATH");
+      List<Stanza> blocks = st.getChildren("BLOCK");
+      this.blocks = new Block[blocks.size()];
+      for (int i = 0; i < blocks.size(); i++) {
+        this.blocks[i] = FSEditLogOp.blockFromXml(blocks.get(i));
+      }
+    }
   }
 
   static class SetReplicationOp extends FSEditLogOp {
@@ -443,7 +515,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(path, out);
       FSImageSerialization.writeShort(replication, out);
@@ -474,6 +547,18 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "PATH", path);
+      XMLUtils.addSaxString(contentHandler, "REPLICATION",
+          Short.valueOf(replication).toString());
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.path = st.getValue("PATH");
+      this.replication = Short.valueOf(st.getValue("REPLICATION"));
+    }
   }
 
   static class ConcatDeleteOp extends FSEditLogOp {
@@ -506,7 +591,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(trg, out);
             
@@ -567,6 +653,38 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "LENGTH",
+          Integer.valueOf(length).toString());
+      XMLUtils.addSaxString(contentHandler, "TRG", trg);
+      XMLUtils.addSaxString(contentHandler, "TIMESTAMP",
+          Long.valueOf(timestamp).toString());
+      contentHandler.startElement("", "", "SOURCES", new AttributesImpl());
+      for (int i = 0; i < srcs.length; ++i) {
+        XMLUtils.addSaxString(contentHandler,
+            "SOURCE" + (i + 1), srcs[i]);
+      }
+      contentHandler.endElement("", "", "SOURCES");
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.length = Integer.valueOf(st.getValue("LENGTH"));
+      this.trg = st.getValue("TRG");
+      this.timestamp = Long.valueOf(st.getValue("TIMESTAMP"));
+      List<Stanza> sources = st.getChildren("SOURCES");
+      int i = 0;
+      while (sources.get(0).hasChildren("SOURCE" + (i + 1))) {
+        i++;
+      }
+      srcs = new String[i];
+      for (i = 0; i < srcs.length; i++) {
+        srcs[i] = sources.get(0).getValue("SOURCE" + (i + 1));
+      }
+    }
   }
 
   static class RenameOldOp extends FSEditLogOp {
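
ConcatDeleteOp flattens its variable-length source list into numbered children of a single SOURCES element, e.g. (paths illustrative):

    <SOURCES>
      <SOURCE1>/user/todd/part-0</SOURCE1>
      <SOURCE2>/user/todd/part-1</SOURCE2>
    </SOURCES>

fromXml() then recovers the array by probing SOURCE1, SOURCE2, ... until the next numbered element is missing.
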
@@ -599,7 +717,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(src, out);
       FSImageSerialization.writeString(dst, out);
@@ -643,6 +762,23 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "LENGTH",
+          Integer.valueOf(length).toString());
+      XMLUtils.addSaxString(contentHandler, "SRC", src);
+      XMLUtils.addSaxString(contentHandler, "DST", dst);
+      XMLUtils.addSaxString(contentHandler, "TIMESTAMP",
+          Long.valueOf(timestamp).toString());
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.length = Integer.valueOf(st.getValue("LENGTH"));
+      this.src = st.getValue("SRC");
+      this.dst = st.getValue("DST");
+      this.timestamp = Long.valueOf(st.getValue("TIMESTAMP"));
+    }
   }
 
   static class DeleteOp extends FSEditLogOp {
@@ -669,7 +805,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(path, out);
       FSImageSerialization.writeLong(timestamp, out);
@@ -708,8 +845,23 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "LENGTH",
+          Integer.valueOf(length).toString());
+      XMLUtils.addSaxString(contentHandler, "PATH", path);
+      XMLUtils.addSaxString(contentHandler, "TIMESTAMP",
+          Long.valueOf(timestamp).toString());
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.length = Integer.valueOf(st.getValue("LENGTH"));
+      this.path = st.getValue("PATH");
+      this.timestamp = Long.valueOf(st.getValue("TIMESTAMP"));
+    }
   }
-
+    
   static class MkdirOp extends FSEditLogOp {
     int length;
     String path;
@@ -740,7 +892,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(path, out);
       FSImageSerialization.writeLong(timestamp, out); // mtime
@@ -797,6 +950,24 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "LENGTH",
+          Integer.valueOf(length).toString());
+      XMLUtils.addSaxString(contentHandler, "PATH", path);
+      XMLUtils.addSaxString(contentHandler, "TIMESTAMP",
+          Long.valueOf(timestamp).toString());
+      FSEditLogOp.permissionStatusToXml(contentHandler, permissions);
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.length = Integer.valueOf(st.getValue("LENGTH"));
+      this.path = st.getValue("PATH");
+      this.timestamp = Long.valueOf(st.getValue("TIMESTAMP"));
+      this.permissions =
+          permissionStatusFromXml(st.getChildren("PERMISSION_STATUS").get(0));
+    }
   }
 
   static class SetGenstampOp extends FSEditLogOp {
@@ -816,7 +987,8 @@ public abstract class FSEditLogOp {
       return this;
     }
     
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeLong(genStamp, out);
     }
@@ -839,6 +1011,16 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "GENSTAMP",
+          Long.valueOf(genStamp).toString());
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.genStamp = Long.valueOf(st.getValue("GENSTAMP"));
+    }
   }
 
   static class SetPermissionsOp extends FSEditLogOp {
@@ -864,7 +1046,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(src, out);
       permissions.write(out);
@@ -891,6 +1074,19 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "SRC", src);
+      XMLUtils.addSaxString(contentHandler, "MODE",
+          Short.valueOf(permissions.toShort()).toString());
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.src = st.getValue("SRC");
+      this.permissions = new FsPermission(
+          Short.valueOf(st.getValue("MODE")));
+    }
   }
 
   static class SetOwnerOp extends FSEditLogOp {
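
SetPermissionsOp stores the mode as the decimal rendering of FsPermission.toShort(), and fromXml() rebuilds the FsPermission from that short. A minimal standalone sketch of the round trip (class name is hypothetical; only FsPermission is assumed from the HDFS code base):

    import org.apache.hadoop.fs.permission.FsPermission;

    public class ModeRoundTrip {
      public static void main(String[] args) {
        FsPermission perm = new FsPermission((short) 0755); // rwxr-xr-x
        // serialize as SetPermissionsOp.toXml() does
        String mode = Short.valueOf(perm.toShort()).toString(); // "493"
        // parse as SetPermissionsOp.fromXml() does
        FsPermission back = new FsPermission(Short.valueOf(mode));
        System.out.println(mode + " -> " + back); // 493 -> rwxr-xr-x
      }
    }
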
@@ -922,7 +1118,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(src, out);
       FSImageSerialization.writeString(username == null ? "" : username, out);
@@ -953,6 +1150,25 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "SRC", src);
+      XMLUtils.addSaxString(contentHandler, "USERNAME", username);
+      if (groupname != null) {
+        XMLUtils.addSaxString(contentHandler, "GROUPNAME", groupname);
+      }
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.src = st.getValue("SRC");
+      this.username = st.getValue("USERNAME");
+      if (st.hasChildren("GROUPNAME")) {
+        this.groupname = st.getValue("GROUPNAME");
+      } else {
+        this.groupname = null;
+      }
+    }
   }
 
   static class SetNSQuotaOp extends FSEditLogOp {
@@ -968,7 +1184,8 @@ public abstract class FSEditLogOp {
         .get(OP_SET_NS_QUOTA);
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       throw new IOException("Deprecated");      
     }
@@ -994,6 +1211,18 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "SRC", src);
+      XMLUtils.addSaxString(contentHandler, "NSQUOTA",
+          Long.valueOf(nsQuota).toString());
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.src = st.getValue("SRC");
+      this.nsQuota = Long.valueOf(st.getValue("NSQUOTA"));
+    }
   }
 
   static class ClearNSQuotaOp extends FSEditLogOp {
@@ -1008,7 +1237,8 @@ public abstract class FSEditLogOp {
         .get(OP_CLEAR_NS_QUOTA);
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       throw new IOException("Deprecated");      
     }
@@ -1031,6 +1261,15 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "SRC", src);
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.src = st.getValue("SRC");
+    }
   }
 
   static class SetQuotaOp extends FSEditLogOp {
@@ -1062,7 +1301,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(src, out);
       FSImageSerialization.writeLong(nsQuota, out);
@@ -1093,6 +1333,21 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "SRC", src);
+      XMLUtils.addSaxString(contentHandler, "NSQUOTA",
+          Long.valueOf(nsQuota).toString());
+      XMLUtils.addSaxString(contentHandler, "DSQUOTA",
+          Long.valueOf(dsQuota).toString());
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.src = st.getValue("SRC");
+      this.nsQuota = Long.valueOf(st.getValue("NSQUOTA"));
+      this.dsQuota = Long.valueOf(st.getValue("DSQUOTA"));
+    }
   }
 
   static class TimesOp extends FSEditLogOp {
@@ -1125,7 +1380,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(path, out);
       FSImageSerialization.writeLong(mtime, out);
@@ -1170,6 +1426,24 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "LENGTH",
+          Integer.valueOf(length).toString());
+      XMLUtils.addSaxString(contentHandler, "PATH", path);
+      XMLUtils.addSaxString(contentHandler, "MTIME",
+          Long.valueOf(mtime).toString());
+      XMLUtils.addSaxString(contentHandler, "ATIME",
+          Long.valueOf(atime).toString());
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.length = Integer.valueOf(st.getValue("LENGTH"));
+      this.path = st.getValue("PATH");
+      this.mtime = Long.valueOf(st.getValue("MTIME"));
+      this.atime = Long.valueOf(st.getValue("ATIME"));
+    }
   }
 
   static class SymlinkOp extends FSEditLogOp {
@@ -1214,7 +1488,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(path, out);
       FSImageSerialization.writeString(value, out);
@@ -1268,6 +1543,29 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "LENGTH",
+          Integer.valueOf(length).toString());
+      XMLUtils.addSaxString(contentHandler, "PATH", path);
+      XMLUtils.addSaxString(contentHandler, "VALUE", value);
+      XMLUtils.addSaxString(contentHandler, "MTIME",
+          Long.valueOf(mtime).toString());
+      XMLUtils.addSaxString(contentHandler, "ATIME",
+          Long.valueOf(atime).toString());
+      FSEditLogOp.permissionStatusToXml(contentHandler, permissionStatus);
+    }
+
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.length = Integer.valueOf(st.getValue("LENGTH"));
+      this.path = st.getValue("PATH");
+      this.value = st.getValue("VALUE");
+      this.mtime = Long.valueOf(st.getValue("MTIME"));
+      this.atime = Long.valueOf(st.getValue("ATIME"));
+      this.permissionStatus =
+          permissionStatusFromXml(st.getChildren("PERMISSION_STATUS").get(0));
+    }
   }
 
   static class RenameOp extends FSEditLogOp {
@@ -1306,7 +1604,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(src, out);
       FSImageSerialization.writeString(dst, out);
@@ -1375,6 +1674,44 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "LENGTH",
+          Integer.valueOf(length).toString());
+      XMLUtils.addSaxString(contentHandler, "SRC", src);
+      XMLUtils.addSaxString(contentHandler, "DST", dst);
+      XMLUtils.addSaxString(contentHandler, "TIMESTAMP",
+          Long.valueOf(timestamp).toString());
+      StringBuilder bld = new StringBuilder();
+      String prefix = "";
+      for (Rename r : options) {
+        bld.append(prefix).append(r.toString());
+        prefix = "|";
+      }
+      XMLUtils.addSaxString(contentHandler, "OPTIONS", bld.toString());
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.length = Integer.valueOf(st.getValue("LENGTH"));
+      this.src = st.getValue("SRC");
+      this.dst = st.getValue("DST");
+      this.timestamp = Long.valueOf(st.getValue("TIMESTAMP"));
+      String opts = st.getValue("OPTIONS");
+      String o[] = opts.split("\\|");
+      this.options = new Rename[o.length];
+      for (int i = 0; i < o.length; i++) {
+        if (o[i].equals(""))
+          continue;
+        try {
+          this.options[i] = Rename.valueOf(o[i]);
+        } catch (IllegalArgumentException e) {
+          // Rename.valueOf throws on unknown names; report it as bad XML
+          // instead of letting the unchecked exception escape.
+          throw new InvalidXmlException(
+              "error parsing Rename value: \"" + o[i] + "\"");
+        }
+      }
+    }
   }
 
   static class ReassignLeaseOp extends FSEditLogOp {
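
RenameOp packs its Rename options into a single pipe-delimited OPTIONS string and splits it again on read; note that an empty OPTIONS value still yields a one-element array (split("\\|") on "" returns [""]), which the equals("") guard above skips, leaving a null slot. A minimal standalone round-trip sketch (class name is hypothetical; Options.Rename is the enum this file already uses):

    import org.apache.hadoop.fs.Options.Rename;

    public class RenameOptionsRoundTrip {
      public static void main(String[] args) {
        Rename[] options = { Rename.OVERWRITE };

        // serialize: join enum names with '|', as toXml() does
        StringBuilder bld = new StringBuilder();
        String prefix = "";
        for (Rename r : options) {
          bld.append(prefix).append(r.toString());
          prefix = "|";
        }
        String opts = bld.toString();             // "OVERWRITE"

        // parse: split and map names back, as fromXml() does
        String[] names = opts.split("\\|");
        Rename[] parsed = new Rename[names.length];
        for (int i = 0; i < names.length; i++) {
          if (!names[i].equals("")) {
            parsed[i] = Rename.valueOf(names[i]); // throws on unknown names
          }
        }
        System.out.println(parsed[0]);            // OVERWRITE
      }
    }
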
@@ -1406,7 +1743,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       FSImageSerialization.writeString(leaseHolder, out);
       FSImageSerialization.writeString(path, out);
@@ -1437,6 +1775,19 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      XMLUtils.addSaxString(contentHandler, "LEASEHOLDER", leaseHolder);
+      XMLUtils.addSaxString(contentHandler, "PATH", path);
+      XMLUtils.addSaxString(contentHandler, "NEWHOLDER", newHolder);
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.leaseHolder = st.getValue("LEASEHOLDER");
+      this.path = st.getValue("PATH");
+      this.newHolder = st.getValue("NEWHOLDER");
+    }
   }
 
   static class GetDelegationTokenOp extends FSEditLogOp {
@@ -1463,7 +1814,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       token.write(out);
       FSImageSerialization.writeLong(expiryTime, out);
@@ -1495,6 +1847,19 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      FSEditLogOp.delegationTokenToXml(contentHandler, token);
+      XMLUtils.addSaxString(contentHandler, "EXPIRY_TIME",
+          Long.valueOf(expiryTime).toString());
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.token = delegationTokenFromXml(st.getChildren(
+          "DELEGATION_TOKEN_IDENTIFIER").get(0));
+      this.expiryTime = Long.valueOf(st.getValue("EXPIRY_TIME"));
+    }
   }
 
   static class RenewDelegationTokenOp extends FSEditLogOp {
@@ -1521,7 +1886,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       token.write(out);
       FSImageSerialization.writeLong(expiryTime, out);
@@ -1553,6 +1919,19 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      FSEditLogOp.delegationTokenToXml(contentHandler, token);
+      XMLUtils.addSaxString(contentHandler, "EXPIRY_TIME",
+          Long.valueOf(expiryTime).toString());
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.token = delegationTokenFromXml(st.getChildren(
+          "DELEGATION_TOKEN_IDENTIFIER").get(0));
+      this.expiryTime = Long.valueOf(st.getValue("EXPIRY_TIME"));
+    }
   }
 
   static class CancelDelegationTokenOp extends FSEditLogOp {
@@ -1573,7 +1952,8 @@ public abstract class FSEditLogOp {
       return this;
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       token.write(out);
     }
@@ -1597,6 +1977,16 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      FSEditLogOp.delegationTokenToXml(contentHandler, token);
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.token = delegationTokenFromXml(st.getChildren(
+          "DELEGATION_TOKEN_IDENTIFIER").get(0));
+    }
   }
 
   static class UpdateMasterKeyOp extends FSEditLogOp {
@@ -1616,7 +2006,8 @@ public abstract class FSEditLogOp {
       return this;
     }
     
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
       key.write(out);
     }
@@ -1640,6 +2031,16 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      FSEditLogOp.delegationKeyToXml(contentHandler, key);
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      this.key = delegationKeyFromXml(st.getChildren(
+          "DELEGATION_KEY").get(0));
+    }
   }
   
   static class LogSegmentOp extends FSEditLogOp {
@@ -1659,6 +2060,7 @@ public abstract class FSEditLogOp {
     }
 
     @Override
+    public
     void writeFields(DataOutputStream out) throws IOException {
       // no data stored
     }
@@ -1673,6 +2075,15 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      // no data stored
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      // do nothing
+    }
   }
 
   static class InvalidOp extends FSEditLogOp {
@@ -1684,7 +2095,8 @@ public abstract class FSEditLogOp {
       return (InvalidOp)opInstances.get().get(OP_INVALID);
     }
 
-    @Override 
+    @Override
+    public 
     void writeFields(DataOutputStream out) throws IOException {
     }
     
@@ -1704,6 +2116,14 @@ public abstract class FSEditLogOp {
       builder.append("]");
       return builder.toString();
     }
+    @Override
+    protected void toXml(ContentHandler contentHandler) throws SAXException {
+      // no data stored
+    }
+    
+    @Override void fromXml(Stanza st) throws InvalidXmlException {
+      // do nothing
+    }
   }
 
   static private short readShort(DataInputStream in) throws IOException {
@@ -1874,4 +2294,140 @@ public abstract class FSEditLogOp {
       }
     }
   }
+
+  public void outputToXml(ContentHandler contentHandler) throws SAXException {
+    contentHandler.startElement("", "", "RECORD", new AttributesImpl());
+    XMLUtils.addSaxString(contentHandler, "OPCODE", opCode.toString());
+    contentHandler.startElement("", "", "DATA", new AttributesImpl());
+    XMLUtils.addSaxString(contentHandler, "TXID", "" + txid);
+    toXml(contentHandler);
+    contentHandler.endElement("", "", "DATA");
+    contentHandler.endElement("", "", "RECORD");
+  }
+
+  protected abstract void toXml(ContentHandler contentHandler)
+      throws SAXException;
+  
+  abstract void fromXml(Stanza st) throws InvalidXmlException;
+  
+  public void decodeXml(Stanza st) throws InvalidXmlException {
+    this.txid = Long.valueOf(st.getValue("TXID"));
+    fromXml(st);
+  }
+  
+  public static void blockToXml(ContentHandler contentHandler, Block block) 
+      throws SAXException {
+    contentHandler.startElement("", "", "BLOCK", new AttributesImpl());
+    XMLUtils.addSaxString(contentHandler, "BLOCK_ID",
+        Long.valueOf(block.getBlockId()).toString());
+    XMLUtils.addSaxString(contentHandler, "NUM_BYTES",
+        Long.valueOf(block.getNumBytes()).toString());
+    XMLUtils.addSaxString(contentHandler, "GENERATION_STAMP",
+        Long.valueOf(block.getGenerationStamp()).toString());
+    contentHandler.endElement("", "", "BLOCK");
+  }
+
+  public static Block blockFromXml(Stanza st)
+      throws InvalidXmlException {
+    long blockId = Long.valueOf(st.getValue("BLOCK_ID"));
+    long numBytes = Long.valueOf(st.getValue("NUM_BYTES"));
+    long generationStamp = Long.valueOf(st.getValue("GENERATION_STAMP"));
+    return new Block(blockId, numBytes, generationStamp);
+  }
+
+  public static void delegationTokenToXml(ContentHandler contentHandler,
+      DelegationTokenIdentifier token) throws SAXException {
+    contentHandler.startElement("", "", "DELEGATION_TOKEN_IDENTIFIER", new AttributesImpl());
+    XMLUtils.addSaxString(contentHandler, "KIND", token.getKind().toString());
+    XMLUtils.addSaxString(contentHandler, "SEQUENCE_NUMBER",
+        Integer.valueOf(token.getSequenceNumber()).toString());
+    XMLUtils.addSaxString(contentHandler, "OWNER",
+        token.getOwner().toString());
+    XMLUtils.addSaxString(contentHandler, "RENEWER",
+        token.getRenewer().toString());
+    XMLUtils.addSaxString(contentHandler, "REALUSER",
+        token.getRealUser().toString());
+    XMLUtils.addSaxString(contentHandler, "ISSUE_DATE",
+        Long.valueOf(token.getIssueDate()).toString());
+    XMLUtils.addSaxString(contentHandler, "MAX_DATE",
+        Long.valueOf(token.getMaxDate()).toString());
+    XMLUtils.addSaxString(contentHandler, "MASTER_KEY_ID",
+        Integer.valueOf(token.getMasterKeyId()).toString());
+    contentHandler.endElement("", "", "DELEGATION_TOKEN_IDENTIFIER");
+  }
+
+  public static DelegationTokenIdentifier delegationTokenFromXml(Stanza st)
+      throws InvalidXmlException {
+    String kind = st.getValue("KIND");
+    if (!kind.equals(DelegationTokenIdentifier.
+        HDFS_DELEGATION_KIND.toString())) {
+      throw new InvalidXmlException("can't understand " +
+        "DelegationTokenIdentifier KIND " + kind);
+    }
+    int seqNum = Integer.valueOf(st.getValue("SEQUENCE_NUMBER"));
+    String owner = st.getValue("OWNER");
+    String renewer = st.getValue("RENEWER");
+    String realuser = st.getValue("REALUSER");
+    long issueDate = Long.valueOf(st.getValue("ISSUE_DATE"));
+    long maxDate = Long.valueOf(st.getValue("MAX_DATE"));
+    int masterKeyId = Integer.valueOf(st.getValue("MASTER_KEY_ID"));
+    DelegationTokenIdentifier token =
+        new DelegationTokenIdentifier(new Text(owner),
+            new Text(renewer), new Text(realuser));
+    token.setSequenceNumber(seqNum);
+    token.setIssueDate(issueDate);
+    token.setMaxDate(maxDate);
+    token.setMasterKeyId(masterKeyId);
+    return token;
+  }
+
+  public static void delegationKeyToXml(ContentHandler contentHandler,
+      DelegationKey key) throws SAXException {
+    contentHandler.startElement("", "", "DELEGATION_KEY", new AttributesImpl());
+    XMLUtils.addSaxString(contentHandler, "KEY_ID",
+        Integer.valueOf(key.getKeyId()).toString());
+    XMLUtils.addSaxString(contentHandler, "EXPIRY_DATE",
+        Long.valueOf(key.getExpiryDate()).toString());
+    if (key.getEncodedKey() != null) {
+      XMLUtils.addSaxString(contentHandler, "KEY",
+          Hex.encodeHexString(key.getEncodedKey()));
+    }
+    contentHandler.endElement("", "", "DELEGATION_KEY");
+  }
+  
+  public static DelegationKey delegationKeyFromXml(Stanza st)
+      throws InvalidXmlException {
+    int keyId = Integer.valueOf(st.getValue("KEY_ID"));
+    long expiryDate = Long.valueOf(st.getValue("EXPIRY_DATE"));
+    byte key[] = null;
+    if (st.hasChildren("KEY")) {
+      // KEY is only emitted when the key material is non-null, so a
+      // missing element means "no key" rather than malformed XML.
+      try {
+        key = Hex.decodeHex(st.getValue("KEY").toCharArray());
+      } catch (DecoderException e) {
+        throw new InvalidXmlException(e.toString());
+      }
+    }
+    return new DelegationKey(keyId, expiryDate, key);
+  }
+
+  public static void permissionStatusToXml(ContentHandler contentHandler,
+      PermissionStatus perm) throws SAXException {
+    contentHandler.startElement("", "", "PERMISSION_STATUS", new AttributesImpl());
+    XMLUtils.addSaxString(contentHandler, "USERNAME", perm.getUserName());
+    XMLUtils.addSaxString(contentHandler, "GROUPNAME", perm.getGroupName());
+    XMLUtils.addSaxString(contentHandler, "MODE",
+        Short.valueOf(perm.getPermission().toShort()).toString());
+    contentHandler.endElement("", "", "PERMISSION_STATUS");
+  }
+
+  public static PermissionStatus permissionStatusFromXml(Stanza st)
+      throws InvalidXmlException {
+    String username = st.getValue("USERNAME");
+    String groupname = st.getValue("GROUPNAME");
+    short mode = Short.valueOf(st.getValue("MODE"));
+    return new PermissionStatus(username, groupname, new FsPermission(mode));
+  }
+
+  public static FSEditLogOp getOpInstance(FSEditLogOpCodes opCode) {
+    return opInstances.get().get(opCode);
+  }
 }
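
Taken together, the newly public getOpInstance() and decodeXml() are enough for an external reader of this XML to rebuild ops. A minimal sketch of such a consumer (the helper is hypothetical and assumes the <OPCODE> text matches the FSEditLogOpCodes constant name, as a default enum toString() would produce):

    // Hypothetical helper, not part of this patch.
    static FSEditLogOp opFromRecord(String opcodeName, XMLUtils.Stanza data)
        throws XMLUtils.InvalidXmlException {
      FSEditLogOpCodes code = FSEditLogOpCodes.valueOf(opcodeName);
      FSEditLogOp op = FSEditLogOp.getOpInstance(code);
      op.decodeXml(data); // reads <TXID>, then dispatches to fromXml()
      return op;
    }
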


