hadoop-hdfs-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r1444439 [3/3] - in /hadoop/common/branches/HDFS-2802/hadoop-hdfs-project: hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/ hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/ hadoop-hdfs-httpfs/src/test/jav...
Date: Sat, 09 Feb 2013 21:29:51 GMT
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryNameNodeUpgrade.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryNameNodeUpgrade.java?rev=1444439&r1=1444438&r2=1444439&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryNameNodeUpgrade.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryNameNodeUpgrade.java Sat Feb  9 21:29:44 2013
@@ -17,9 +17,12 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import com.google.common.collect.ImmutableMap;
+
 import java.io.File;
 import java.io.IOException;
 import java.util.List;
+import java.util.Map;
 
 import org.junit.Test;
 import org.junit.Before;
@@ -51,7 +54,7 @@ public class TestSecondaryNameNodeUpgrad
     }
   }
 
-  private void doIt(String param, String val) throws IOException {
+  private void doIt(Map<String, String> paramsToCorrupt) throws IOException {
     MiniDFSCluster cluster = null;
     FileSystem fs = null;
     SecondaryNameNode snn = null;
@@ -76,8 +79,12 @@ public class TestSecondaryNameNodeUpgrad
       snn.shutdown();
 
       for (File versionFile : versionFiles) {
-        System.out.println("Changing '" + param + "' to '" + val + "' in " + versionFile);
-        FSImageTestUtil.corruptVersionFile(versionFile, param, val);
+        for (Map.Entry<String, String> paramToCorrupt : paramsToCorrupt.entrySet()) {
+          String param = paramToCorrupt.getKey();
+          String val = paramToCorrupt.getValue();
+          System.out.println("Changing '" + param + "' to '" + val + "' in " + versionFile);
+          FSImageTestUtil.corruptVersionFile(versionFile, param, val);
+        }
       }
 
       snn = new SecondaryNameNode(conf);
@@ -94,13 +101,19 @@ public class TestSecondaryNameNodeUpgrad
 
   @Test
   public void testUpgradeLayoutVersionSucceeds() throws IOException {
-    doIt("layoutVersion", "-39");
+    doIt(ImmutableMap.of("layoutVersion", "-39"));
+  }
+
+  @Test
+  public void testUpgradePreFedSucceeds() throws IOException {
+    doIt(ImmutableMap.of("layoutVersion", "-19", "clusterID", "",
+          "blockpoolID", ""));
   }
 
   @Test
   public void testChangeNsIDFails() throws IOException {
     try {
-      doIt("namespaceID", "2");
+      doIt(ImmutableMap.of("namespaceID", "2"));
       Assert.fail("Should throw InconsistentFSStateException");
     } catch(IOException e) {
       GenericTestUtils.assertExceptionContains("Inconsistent checkpoint fields", e);

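[Editor's note, not part of the commit: a minimal, self-contained sketch of the pattern the refactored doIt(Map<String, String>) relies on. Guava's ImmutableMap.of builds a small fixed map inline, and iterating entrySet() visits each key/value pair, which is how the test now applies several VERSION-file corruptions per file. The class and method names below are illustrative only.]

    import java.util.Map;
    import com.google.common.collect.ImmutableMap;

    // Illustrative only: mirrors the entrySet() loop added to doIt() above.
    public class VersionFieldExample {
      public static void main(String[] args) {
        Map<String, String> paramsToCorrupt =
            ImmutableMap.of("layoutVersion", "-19", "clusterID", "", "blockpoolID", "");
        for (Map.Entry<String, String> e : paramsToCorrupt.entrySet()) {
          System.out.println("Changing '" + e.getKey() + "' to '" + e.getValue() + "'");
        }
      }
    }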
Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java?rev=1444439&r1=1444438&r2=1444439&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java Sat Feb  9 21:29:44 2013
@@ -82,7 +82,7 @@ public class TestNameNodeMetrics {
     CONF.set(DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY, 
         "" + PERCENTILES_INTERVAL);
     // Enable stale DataNodes checking
-    CONF.setBoolean(DFSConfigKeys.DFS_NAMENODE_CHECK_STALE_DATANODE_KEY, true);
+    CONF.setBoolean(DFSConfigKeys.DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_READ_KEY, true);
     ((Log4JLogger)LogFactory.getLog(MetricsAsserts.class))
       .getLogger().setLevel(Level.DEBUG);
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java?rev=1444439&r1=1444438&r2=1444439&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java Sat Feb  9 21:29:44 2013
@@ -24,6 +24,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
+import org.apache.hadoop.hdfs.server.namenode.INodeId;
 import org.apache.hadoop.util.Time;
 import org.junit.Assert;
 import org.junit.Test;
@@ -42,9 +43,10 @@ public class TestJsonUtil {
   public void testHdfsFileStatus() {
     final long now = Time.now();
     final String parent = "/dir";
-    final HdfsFileStatus status = new HdfsFileStatus(1001L, false, 3, 1L<<26,
-        now, now + 10, new FsPermission((short)0644), "user", "group",
-        DFSUtil.string2Bytes("bar"), DFSUtil.string2Bytes("foo"));
+    final HdfsFileStatus status = new HdfsFileStatus(1001L, false, 3, 1L << 26,
+        now, now + 10, new FsPermission((short) 0644), "user", "group",
+        DFSUtil.string2Bytes("bar"), DFSUtil.string2Bytes("foo"),
+        INodeId.GRANDFATHER_INODE_ID);
     final FileStatus fstatus = toFileStatus(status, parent);
     System.out.println("status  = " + status);
     System.out.println("fstatus = " + fstatus);

Modified: hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml?rev=1444439&r1=1444438&r2=1444439&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testHDFSConf.xml Sat Feb  9 21:29:44 2013
@@ -1182,7 +1182,7 @@
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^1\.0k\s+hdfs:///dir0/data1k</expected-output>
+          <expected-output>^1\.0 K\s+hdfs:///dir0/data1k</expected-output>
         </comparator>
       </comparators>
     </test>
@@ -15590,7 +15590,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>put: The DiskSpace quota of /dir1 is exceeded: quota=1.0k diskspace consumed=[0-9.]+[kmg]*</expected-output>
+          <expected-output>put: The DiskSpace quota of /dir1 is exceeded: quota = 1024 B = 1 KB but diskspace consumed = [0-9]+ B = [0-9.]+ [KMG]B*</expected-output>
         </comparator>
       </comparators>
     </test>


