hadoop-common-commits mailing list archives

From ji...@apache.org
Subject [1/2] hadoop git commit: HDFS-9427. HDFS should not default to ephemeral ports. Contributed by Xiaobing Zhou.
Date Fri, 22 Apr 2016 22:15:06 GMT
Repository: hadoop
Updated Branches:
  refs/heads/trunk c610031ca -> 63e5412f1
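
For reference, the default-port moves applied by this patch (old and new values taken from the hunks below; the constant names in this sketch are illustrative only, not Hadoop's):

    // Illustrative summary only -- constant names are hypothetical,
    // values are the old/new defaults visible in the hunks below.
    final class PortMoves {
      static final int NAMENODE_IPC      = 9820; // was 8020  (fs.defaultFS)
      static final int NAMENODE_HTTP     = 9870; // was 50070
      static final int SECONDARY_NN_HTTP = 9868; // was 50090 (dfs.namenode.secondary.http-address)
      static final int DATANODE_XFER     = 9866; // was 50010 (dfs.datanode.address)
      static final int DATANODE_HTTP     = 9864; // was 50075 (dfs.datanode.http.address)
      static final int DATANODE_HTTPS    = 9865; // was 50475 (dfs.datanode.https.address)
      static final int DATANODE_IPC      = 9867; // was 50020 (dfs.datanode.ipc.address)
    }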


http://git-wip-us.apache.org/repos/asf/hadoop/blob/63e5412f/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
index 6186f30..b0626ba 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
@@ -37,36 +37,36 @@ public class TestOptionsParser {
   @Test
   public void testParseIgnoreFailure() {
     DistCpOptions options = OptionsParser.parse(new String[] {
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertFalse(options.shouldIgnoreFailures());
 
     options = OptionsParser.parse(new String[] {
         "-i",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertTrue(options.shouldIgnoreFailures());
   }
 
   @Test
   public void testParseOverwrite() {
     DistCpOptions options = OptionsParser.parse(new String[] {
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertFalse(options.shouldOverwrite());
 
     options = OptionsParser.parse(new String[] {
         "-overwrite",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertTrue(options.shouldOverwrite());
 
     try {
       OptionsParser.parse(new String[] {
           "-update",
           "-overwrite",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target/"});
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target/"});
       Assert.fail("Update and overwrite aren't allowed together");
     } catch (IllegalArgumentException ignore) {
     }
@@ -75,44 +75,44 @@ public class TestOptionsParser {
   @Test
   public void testLogPath() {
     DistCpOptions options = OptionsParser.parse(new String[] {
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertNull(options.getLogPath());
 
     options = OptionsParser.parse(new String[] {
         "-log",
-        "hdfs://localhost:8020/logs",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
-    Assert.assertEquals(options.getLogPath(), new Path("hdfs://localhost:8020/logs"));
+        "hdfs://localhost:9820/logs",
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
+    Assert.assertEquals(options.getLogPath(), new Path("hdfs://localhost:9820/logs"));
   }
 
   @Test
   public void testParseBlocking() {
     DistCpOptions options = OptionsParser.parse(new String[] {
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertTrue(options.shouldBlock());
 
     options = OptionsParser.parse(new String[] {
         "-async",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertFalse(options.shouldBlock());
   }
 
   @Test
   public void testParseBandwidth() {
     DistCpOptions options = OptionsParser.parse(new String[] {
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertEquals(options.getMapBandwidth(), DistCpConstants.DEFAULT_BANDWIDTH_MB, DELTA);
 
     options = OptionsParser.parse(new String[] {
         "-bandwidth",
         "11.2",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertEquals(options.getMapBandwidth(), 11.2, DELTA);
   }
 
@@ -121,8 +121,8 @@ public class TestOptionsParser {
     OptionsParser.parse(new String[] {
         "-bandwidth",
         "-11",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
   }
 
   @Test(expected=IllegalArgumentException.class)
@@ -130,22 +130,22 @@ public class TestOptionsParser {
     OptionsParser.parse(new String[] {
         "-bandwidth",
         "0",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
   }
 
   @Test
   public void testParseSkipCRC() {
     DistCpOptions options = OptionsParser.parse(new String[] {
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertFalse(options.shouldSkipCRC());
 
     options = OptionsParser.parse(new String[] {
         "-update",
         "-skipcrccheck",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertTrue(options.shouldSyncFolder());
     Assert.assertTrue(options.shouldSkipCRC());
   }
@@ -153,22 +153,22 @@ public class TestOptionsParser {
   @Test
   public void testParseAtomicCommit() {
     DistCpOptions options = OptionsParser.parse(new String[] {
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertFalse(options.shouldAtomicCommit());
 
     options = OptionsParser.parse(new String[] {
         "-atomic",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertTrue(options.shouldAtomicCommit());
 
     try {
       OptionsParser.parse(new String[] {
           "-atomic",
           "-update",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target/"});
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target/"});
       Assert.fail("Atomic and sync folders were allowed");
     } catch (IllegalArgumentException ignore) { }
   }
@@ -176,30 +176,30 @@ public class TestOptionsParser {
   @Test
   public void testParseWorkPath() {
     DistCpOptions options = OptionsParser.parse(new String[] {
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertNull(options.getAtomicWorkPath());
 
     options = OptionsParser.parse(new String[] {
         "-atomic",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertNull(options.getAtomicWorkPath());
 
     options = OptionsParser.parse(new String[] {
         "-atomic",
         "-tmp",
-        "hdfs://localhost:8020/work",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
-    Assert.assertEquals(options.getAtomicWorkPath(), new Path("hdfs://localhost:8020/work"));
+        "hdfs://localhost:9820/work",
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
+    Assert.assertEquals(options.getAtomicWorkPath(), new Path("hdfs://localhost:9820/work"));
 
     try {
       OptionsParser.parse(new String[] {
           "-tmp",
-          "hdfs://localhost:8020/work",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target/"});
+          "hdfs://localhost:9820/work",
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target/"});
       Assert.fail("work path was allowed without -atomic switch");
     } catch (IllegalArgumentException ignore) {}
   }
@@ -207,37 +207,37 @@ public class TestOptionsParser {
   @Test
   public void testParseSyncFolders() {
     DistCpOptions options = OptionsParser.parse(new String[] {
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertFalse(options.shouldSyncFolder());
 
     options = OptionsParser.parse(new String[] {
         "-update",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertTrue(options.shouldSyncFolder());
   }
 
   @Test
   public void testParseDeleteMissing() {
     DistCpOptions options = OptionsParser.parse(new String[] {
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertFalse(options.shouldDeleteMissing());
 
     options = OptionsParser.parse(new String[] {
         "-update",
         "-delete",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertTrue(options.shouldSyncFolder());
     Assert.assertTrue(options.shouldDeleteMissing());
 
     options = OptionsParser.parse(new String[] {
         "-overwrite",
         "-delete",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertTrue(options.shouldOverwrite());
     Assert.assertTrue(options.shouldDeleteMissing());
 
@@ -245,8 +245,8 @@ public class TestOptionsParser {
       OptionsParser.parse(new String[] {
           "-atomic",
           "-delete",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target/"});
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target/"});
       Assert.fail("Atomic and delete folders were allowed");
     } catch (IllegalArgumentException ignore) { }
   }
@@ -254,38 +254,38 @@ public class TestOptionsParser {
   @Test
   public void testParseMaps() {
     DistCpOptions options = OptionsParser.parse(new String[] {
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertEquals(options.getMaxMaps(), DistCpConstants.DEFAULT_MAPS);
 
     options = OptionsParser.parse(new String[] {
         "-m",
         "1",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertEquals(options.getMaxMaps(), 1);
 
     options = OptionsParser.parse(new String[] {
         "-m",
         "0",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertEquals(options.getMaxMaps(), 1);
 
     try {
       OptionsParser.parse(new String[] {
           "-m",
           "hello",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target/"});
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target/"});
       Assert.fail("Non numberic map parsed");
     } catch (IllegalArgumentException ignore) { }
 
     try {
       OptionsParser.parse(new String[] {
           "-mapredXslConf",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target/"});
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target/"});
       Assert.fail("Non numberic map parsed");
     } catch (IllegalArgumentException ignore) { }
   }
@@ -293,8 +293,8 @@ public class TestOptionsParser {
   @Test
   public void testParseNumListstatusThreads() {
     DistCpOptions options = OptionsParser.parse(new String[] {
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     // If command line argument isn't set, we expect .getNumListstatusThreads
     // option to be zero (so that we know when to override conf properties).
     Assert.assertEquals(0, options.getNumListstatusThreads());
@@ -302,23 +302,23 @@ public class TestOptionsParser {
     options = OptionsParser.parse(new String[] {
         "--numListstatusThreads",
         "12",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertEquals(12, options.getNumListstatusThreads());
 
     options = OptionsParser.parse(new String[] {
         "--numListstatusThreads",
         "0",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertEquals(0, options.getNumListstatusThreads());
 
     try {
       OptionsParser.parse(new String[] {
           "--numListstatusThreads",
           "hello",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target/"});
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target/"});
       Assert.fail("Non numberic numListstatusThreads parsed");
     } catch (IllegalArgumentException ignore) { }
 
@@ -326,8 +326,8 @@ public class TestOptionsParser {
     options = OptionsParser.parse(new String[] {
         "--numListstatusThreads",
         "100",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertEquals(DistCpOptions.maxNumListstatusThreads,
                         options.getNumListstatusThreads());
   }
@@ -336,10 +336,10 @@ public class TestOptionsParser {
   public void testSourceListing() {
     DistCpOptions options = OptionsParser.parse(new String[] {
         "-f",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertEquals(options.getSourceFileListing(),
-        new Path("hdfs://localhost:8020/source/first"));
+        new Path("hdfs://localhost:9820/source/first"));
   }
 
   @Test
@@ -347,9 +347,9 @@ public class TestOptionsParser {
     try {
       OptionsParser.parse(new String[] {
           "-f",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target/"});
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target/"});
       Assert.fail("Both source listing & source paths allowed");
     } catch (IllegalArgumentException ignore) {}
   }
@@ -358,7 +358,7 @@ public class TestOptionsParser {
   public void testMissingSourceInfo() {
     try {
       OptionsParser.parse(new String[] {
-          "hdfs://localhost:8020/target/"});
+          "hdfs://localhost:9820/target/"});
       Assert.fail("Neither source listing not source paths present");
     } catch (IllegalArgumentException ignore) {}
   }
@@ -367,7 +367,7 @@ public class TestOptionsParser {
   public void testMissingTarget() {
     try {
       OptionsParser.parse(new String[] {
-          "-f", "hdfs://localhost:8020/source"});
+          "-f", "hdfs://localhost:9820/source"});
       Assert.fail("Missing target allowed");
     } catch (IllegalArgumentException ignore) {}
   }
@@ -376,7 +376,7 @@ public class TestOptionsParser {
   public void testInvalidArgs() {
     try {
       OptionsParser.parse(new String[] {
-          "-m", "-f", "hdfs://localhost:8020/source"});
+          "-m", "-f", "hdfs://localhost:9820/source"});
       Assert.fail("Missing map value");
     } catch (IllegalArgumentException ignore) {}
   }
@@ -404,14 +404,14 @@ public class TestOptionsParser {
         "-strategy",
         "dynamic",
         "-f",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertEquals(options.getCopyStrategy(), "dynamic");
 
     options = OptionsParser.parse(new String[] {
         "-f",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertEquals(options.getCopyStrategy(), DistCpConstants.UNIFORMSIZE);
   }
 
@@ -419,17 +419,17 @@ public class TestOptionsParser {
   public void testTargetPath() {
     DistCpOptions options = OptionsParser.parse(new String[] {
         "-f",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
-    Assert.assertEquals(options.getTargetPath(), new Path("hdfs://localhost:8020/target/"));
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
+    Assert.assertEquals(options.getTargetPath(), new Path("hdfs://localhost:9820/target/"));
   }
 
   @Test
   public void testPreserve() {
     DistCpOptions options = OptionsParser.parse(new String[] {
         "-f",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertFalse(options.shouldPreserve(FileAttribute.BLOCKSIZE));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.REPLICATION));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.PERMISSION));
@@ -440,8 +440,8 @@ public class TestOptionsParser {
     options = OptionsParser.parse(new String[] {
         "-p",
         "-f",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertTrue(options.shouldPreserve(FileAttribute.BLOCKSIZE));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.REPLICATION));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.PERMISSION));
@@ -453,8 +453,8 @@ public class TestOptionsParser {
 
     options = OptionsParser.parse(new String[] {
         "-p",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertTrue(options.shouldPreserve(FileAttribute.BLOCKSIZE));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.REPLICATION));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.PERMISSION));
@@ -467,8 +467,8 @@ public class TestOptionsParser {
     options = OptionsParser.parse(new String[] {
         "-pbr",
         "-f",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertTrue(options.shouldPreserve(FileAttribute.BLOCKSIZE));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.REPLICATION));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.PERMISSION));
@@ -481,8 +481,8 @@ public class TestOptionsParser {
     options = OptionsParser.parse(new String[] {
         "-pbrgup",
         "-f",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertTrue(options.shouldPreserve(FileAttribute.BLOCKSIZE));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.REPLICATION));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.PERMISSION));
@@ -495,8 +495,8 @@ public class TestOptionsParser {
     options = OptionsParser.parse(new String[] {
         "-pbrgupcaxt",
         "-f",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertTrue(options.shouldPreserve(FileAttribute.BLOCKSIZE));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.REPLICATION));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.PERMISSION));
@@ -510,8 +510,8 @@ public class TestOptionsParser {
     options = OptionsParser.parse(new String[] {
         "-pc",
         "-f",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertFalse(options.shouldPreserve(FileAttribute.BLOCKSIZE));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.REPLICATION));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.PERMISSION));
@@ -524,8 +524,8 @@ public class TestOptionsParser {
     options = OptionsParser.parse(new String[] {
         "-p",
         "-f",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     int i = 0;
     Iterator<FileAttribute> attribIterator = options.preserveAttributes();
     while (attribIterator.hasNext()) {
@@ -538,8 +538,8 @@ public class TestOptionsParser {
       OptionsParser.parse(new String[] {
           "-pabcd",
           "-f",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target"});
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target"});
       Assert.fail("Invalid preserve attribute");
     }
     catch (IllegalArgumentException ignore) {}
@@ -547,8 +547,8 @@ public class TestOptionsParser {
 
     options = OptionsParser.parse(new String[] {
         "-f",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertFalse(options.shouldPreserve(FileAttribute.PERMISSION));
     options.preserve(FileAttribute.PERMISSION);
     Assert.assertTrue(options.shouldPreserve(FileAttribute.PERMISSION));
@@ -573,8 +573,8 @@ public class TestOptionsParser {
     DistCpOptions options = OptionsParser.parse(new String[] {
         "-atomic",
         "-i",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     options.appendToConf(conf);
     Assert.assertTrue(conf.getBoolean(DistCpOptionSwitch.IGNORE_FAILURES.getConfigLabel(), false));
     Assert.assertTrue(conf.getBoolean(DistCpOptionSwitch.ATOMIC_COMMIT.getConfigLabel(), false));
@@ -591,8 +591,8 @@ public class TestOptionsParser {
         "-pu",
         "-bandwidth",
         "11.2",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     options.appendToConf(conf);
     Assert.assertTrue(conf.getBoolean(DistCpOptionSwitch.SYNC_FOLDERS.getConfigLabel(), false));
     Assert.assertTrue(conf.getBoolean(DistCpOptionSwitch.DELETE_MISSING.getConfigLabel(), false));
@@ -609,8 +609,8 @@ public class TestOptionsParser {
         DistCpOptionSwitch.SYNC_FOLDERS.getConfigLabel(), false));
 
     DistCpOptions options = OptionsParser.parse(new String[] { "-update",
-        "-append", "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/" });
+        "-append", "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/" });
     options.appendToConf(conf);
     Assert.assertTrue(conf.getBoolean(
         DistCpOptionSwitch.APPEND.getConfigLabel(), false));
@@ -620,8 +620,8 @@ public class TestOptionsParser {
     // make sure -append is only valid when -update is specified
     try {
       OptionsParser.parse(new String[] { "-append",
-              "hdfs://localhost:8020/source/first",
-              "hdfs://localhost:8020/target/" });
+              "hdfs://localhost:9820/source/first",
+              "hdfs://localhost:9820/target/" });
       fail("Append should fail if update option is not specified");
     } catch (IllegalArgumentException e) {
       GenericTestUtils.assertExceptionContains(
@@ -632,8 +632,8 @@ public class TestOptionsParser {
     try {
       OptionsParser.parse(new String[] {
           "-append", "-update", "-skipcrccheck",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target/" });
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target/" });
       fail("Append should fail if skipCrc option is specified");
     } catch (IllegalArgumentException e) {
       GenericTestUtils.assertExceptionContains(
@@ -649,8 +649,8 @@ public class TestOptionsParser {
 
     DistCpOptions options = OptionsParser.parse(new String[] { "-update",
         "-diff", "s1", "s2",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/" });
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/" });
     options.appendToConf(conf);
     Assert.assertTrue(conf.getBoolean(DistCpOptionSwitch.DIFF.getConfigLabel(), false));
     Assert.assertTrue(options.shouldUseDiff());
@@ -659,8 +659,8 @@ public class TestOptionsParser {
 
     options = OptionsParser.parse(new String[] {
         "-diff", "s1", ".", "-update",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/" });
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/" });
     options.appendToConf(conf);
     Assert.assertTrue(conf.getBoolean(DistCpOptionSwitch.DIFF.getConfigLabel(),
         false));
@@ -671,8 +671,8 @@ public class TestOptionsParser {
     // -diff requires two option values
     try {
       OptionsParser.parse(new String[] {"-diff", "s1", "-update",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target/" });
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target/" });
       fail("-diff should fail with only one snapshot name");
     } catch (IllegalArgumentException e) {
       GenericTestUtils.assertExceptionContains(
@@ -682,8 +682,8 @@ public class TestOptionsParser {
     // make sure -diff is only valid when -update is specified
     try {
       OptionsParser.parse(new String[] { "-diff", "s1", "s2",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target/" });
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target/" });
       fail("-diff should fail if -update option is not specified");
     } catch (IllegalArgumentException e) {
       GenericTestUtils.assertExceptionContains(
@@ -692,8 +692,8 @@ public class TestOptionsParser {
 
     try {
       OptionsParser.parse(new String[] { "-diff", "s1", "s2", "-update", "-delete",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target/" });
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target/" });
       fail("-diff should fail if -delete option is specified");
     } catch (IllegalArgumentException e) {
       GenericTestUtils.assertExceptionContains(
@@ -703,8 +703,8 @@ public class TestOptionsParser {
     try {
       OptionsParser.parse(new String[] { "-diff", "s1", "s2",
           "-delete", "-overwrite",
-          "hdfs://localhost:8020/source/first",
-          "hdfs://localhost:8020/target/" });
+          "hdfs://localhost:9820/source/first",
+          "hdfs://localhost:9820/target/" });
       fail("-diff should fail if -update option is not specified");
     } catch (IllegalArgumentException e) {
       GenericTestUtils.assertExceptionContains(
@@ -715,15 +715,15 @@ public class TestOptionsParser {
   @Test
   public void testExclusionsOption() {
     DistCpOptions options = OptionsParser.parse(new String[] {
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertNull(options.getFiltersFile());
 
     options = OptionsParser.parse(new String[] {
         "-filters",
         "/tmp/filters.txt",
-        "hdfs://localhost:8020/source/first",
-        "hdfs://localhost:8020/target/"});
+        "hdfs://localhost:9820/source/first",
+        "hdfs://localhost:9820/target/"});
     Assert.assertEquals(options.getFiltersFile(), "/tmp/filters.txt");
   }
 }
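
The tests above all funnel through the same entry point; a minimal standalone sketch of that call pattern (class, option, and accessor names as used in this test file, new default port):

    // Sketch: parsing a DistCp command line, as exercised by the tests above.
    DistCpOptions options = OptionsParser.parse(new String[] {
        "-update",
        "-delete",
        "hdfs://localhost:9820/source/first",
        "hdfs://localhost:9820/target/"});
    assert options.shouldSyncFolder() && options.shouldDeleteMissing();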

http://git-wip-us.apache.org/repos/asf/hadoop/blob/63e5412f/hadoop-tools/hadoop-openstack/src/site/markdown/index.md
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/site/markdown/index.md b/hadoop-tools/hadoop-openstack/src/site/markdown/index.md
index 1815f60..7c5e783 100644
--- a/hadoop-tools/hadoop-openstack/src/site/markdown/index.md
+++ b/hadoop-tools/hadoop-openstack/src/site/markdown/index.md
@@ -165,7 +165,7 @@ Hadoop uses URIs to refer to files within a filesystem. Some common examples are
 
         local://etc/hosts
         hdfs://cluster1/users/example/data/set1
-        hdfs://cluster2.example.org:8020/users/example/data/set1
+        hdfs://cluster2.example.org:9820/users/example/data/set1
 
The Swift Filesystem Client adds a new URL type `swift`. In a Swift Filesystem URL, the hostname part of a URL identifies the container and the service to work with; the path, the name of the object. Here are some examples
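
Following that description, an illustrative Swift URL (container, service, and object names hypothetical) would be `swift://mycontainer.myservice/data/set1`.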
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/63e5412f/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json b/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json
index 839f767..d7c219d 100644
--- a/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json
+++ b/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json
@@ -4549,7 +4549,7 @@
     "s3.blocksize" : "67108864",
     "mapreduce.task.io.sort.factor" : "10",
     "yarn.nodemanager.disk-health-checker.interval-ms" : "120000",
-    "mapreduce.job.working.dir" : "hdfs://a2115.smile.com:8020/user/jenkins",
+    "mapreduce.job.working.dir" : "hdfs://a2115.smile.com:9820/user/jenkins",
     "yarn.admin.acl" : "*",
     "mapreduce.job.speculative.speculativecap" : "0.1",
     "dfs.namenode.num.checkpoints.retained" : "2",
@@ -4585,7 +4585,7 @@
     "dfs.namenode.edits.dir" : "${dfs.namenode.name.dir}",
     "ha.health-monitor.sleep-after-disconnect.ms" : "1000",
     "dfs.encrypt.data.transfer" : "false",
-    "dfs.datanode.http.address" : "0.0.0.0:50075",
+    "dfs.datanode.http.address" : "0.0.0.0:9864",
     "mapreduce.terasort.num-rows" : "400000000",
     "mapreduce.job.map.class" : "org.apache.hadoop.examples.terasort.TeraGen$SortGenMapper",
     "mapreduce.jobtracker.jobhistory.task.numberprogresssplits" : "12",
@@ -4811,7 +4811,7 @@
     "ftp.stream-buffer-size" : "4096",
     "dfs.namenode.avoid.write.stale.datanode" : "false",
     "hadoop.security.group.mapping.ldap.search.attr.member" : "member",
-    "mapreduce.output.fileoutputformat.outputdir" : "hdfs://a2115.smile.com:8020/user/jenkins/tera-gen-1",
+    "mapreduce.output.fileoutputformat.outputdir" : "hdfs://a2115.smile.com:9820/user/jenkins/tera-gen-1",
     "dfs.blockreport.initialDelay" : "0",
     "yarn.nm.liveness-monitor.expiry-interval-ms" : "600000",
     "hadoop.http.authentication.token.validity" : "36000",
@@ -4837,7 +4837,7 @@
     "yarn.scheduler.maximum-allocation-mb" : "8192",
     "yarn.nodemanager.heartbeat.interval-ms" : "1000",
     "mapreduce.job.userlog.retain.hours" : "24",
-    "dfs.namenode.secondary.http-address" : "0.0.0.0:50090",
+    "dfs.namenode.secondary.http-address" : "0.0.0.0:9868",
     "mapreduce.task.timeout" : "600000",
     "mapreduce.framework.name" : "yarn",
     "ipc.client.idlethreshold" : "4000",
@@ -4857,7 +4857,7 @@
     "hadoop.security.auth_to_local" : "DEFAULT",
     "dfs.secondary.namenode.kerberos.internal.spnego.principal" : "${dfs.web.authentication.kerberos.principal}",
     "ftp.client-write-packet-size" : "65536",
-    "fs.defaultFS" : "hdfs://a2115.smile.com:8020",
+    "fs.defaultFS" : "hdfs://a2115.smile.com:9820",
     "yarn.nodemanager.address" : "0.0.0.0:0",
     "yarn.scheduler.fair.assignmultiple" : "true",
     "yarn.resourcemanager.scheduler.client.thread-count" : "50",
@@ -4901,9 +4901,9 @@
     "fs.s3n.block.size" : "67108864",
     "fs.ftp.host" : "0.0.0.0",
     "hadoop.security.group.mapping" : "org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback",
-    "dfs.datanode.address" : "0.0.0.0:50010",
+    "dfs.datanode.address" : "0.0.0.0:9866",
     "mapreduce.map.skip.maxrecords" : "0",
-    "dfs.datanode.https.address" : "0.0.0.0:50475",
+    "dfs.datanode.https.address" : "0.0.0.0:9865",
     "file.replication" : "1",
     "yarn.resourcemanager.resource-tracker.address" : "a2115.smile.com:8031",
     "dfs.datanode.drop.cache.behind.reads" : "false",
@@ -4970,7 +4970,7 @@
     "yarn.resourcemanager.resource-tracker.client.thread-count" : "50",
     "mapreduce.tasktracker.dns.nameserver" : "default",
     "mapreduce.map.output.compress" : "true",
-    "dfs.datanode.ipc.address" : "0.0.0.0:50020",
+    "dfs.datanode.ipc.address" : "0.0.0.0:9867",
     "hadoop.ssl.require.client.cert" : "false",
     "yarn.nodemanager.delete.debug-delay-sec" : "0",
     "dfs.datanode.max.transfer.threads" : "4096"
@@ -9652,7 +9652,7 @@
     "s3.blocksize" : "67108864",
     "mapreduce.task.io.sort.factor" : "10",
     "yarn.nodemanager.disk-health-checker.interval-ms" : "120000",
-    "mapreduce.job.working.dir" : "hdfs://a2115.smile.com:8020/user/jenkins",
+    "mapreduce.job.working.dir" : "hdfs://a2115.smile.com:9820/user/jenkins",
     "yarn.admin.acl" : "*",
     "mapreduce.job.speculative.speculativecap" : "0.1",
     "dfs.namenode.num.checkpoints.retained" : "2",
@@ -9688,7 +9688,7 @@
     "dfs.namenode.edits.dir" : "${dfs.namenode.name.dir}",
     "ha.health-monitor.sleep-after-disconnect.ms" : "1000",
     "dfs.encrypt.data.transfer" : "false",
-    "dfs.datanode.http.address" : "0.0.0.0:50075",
+    "dfs.datanode.http.address" : "0.0.0.0:9864",
     "mapreduce.terasort.num-rows" : "400000000",
     "mapreduce.job.map.class" : "org.apache.hadoop.examples.terasort.TeraGen$SortGenMapper",
     "mapreduce.jobtracker.jobhistory.task.numberprogresssplits" : "12",
@@ -9914,7 +9914,7 @@
     "ftp.stream-buffer-size" : "4096",
     "dfs.namenode.avoid.write.stale.datanode" : "false",
     "hadoop.security.group.mapping.ldap.search.attr.member" : "member",
-    "mapreduce.output.fileoutputformat.outputdir" : "hdfs://a2115.smile.com:8020/user/jenkins/tera-gen-2",
+    "mapreduce.output.fileoutputformat.outputdir" : "hdfs://a2115.smile.com:9820/user/jenkins/tera-gen-2",
     "dfs.blockreport.initialDelay" : "0",
     "yarn.nm.liveness-monitor.expiry-interval-ms" : "600000",
     "hadoop.http.authentication.token.validity" : "36000",
@@ -9940,7 +9940,7 @@
     "yarn.scheduler.maximum-allocation-mb" : "8192",
     "yarn.nodemanager.heartbeat.interval-ms" : "1000",
     "mapreduce.job.userlog.retain.hours" : "24",
-    "dfs.namenode.secondary.http-address" : "0.0.0.0:50090",
+    "dfs.namenode.secondary.http-address" : "0.0.0.0:9868",
     "mapreduce.task.timeout" : "600000",
     "mapreduce.framework.name" : "yarn",
     "ipc.client.idlethreshold" : "4000",
@@ -9960,7 +9960,7 @@
     "hadoop.security.auth_to_local" : "DEFAULT",
     "dfs.secondary.namenode.kerberos.internal.spnego.principal" : "${dfs.web.authentication.kerberos.principal}",
     "ftp.client-write-packet-size" : "65536",
-    "fs.defaultFS" : "hdfs://a2115.smile.com:8020",
+    "fs.defaultFS" : "hdfs://a2115.smile.com:9820",
     "yarn.nodemanager.address" : "0.0.0.0:0",
     "yarn.scheduler.fair.assignmultiple" : "true",
     "yarn.resourcemanager.scheduler.client.thread-count" : "50",
@@ -10004,9 +10004,9 @@
     "fs.s3n.block.size" : "67108864",
     "fs.ftp.host" : "0.0.0.0",
     "hadoop.security.group.mapping" : "org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback",
-    "dfs.datanode.address" : "0.0.0.0:50010",
+    "dfs.datanode.address" : "0.0.0.0:9866",
     "mapreduce.map.skip.maxrecords" : "0",
-    "dfs.datanode.https.address" : "0.0.0.0:50475",
+    "dfs.datanode.https.address" : "0.0.0.0:9865",
     "file.replication" : "1",
     "yarn.resourcemanager.resource-tracker.address" : "a2115.smile.com:8031",
     "dfs.datanode.drop.cache.behind.reads" : "false",
@@ -10073,7 +10073,7 @@
     "yarn.resourcemanager.resource-tracker.client.thread-count" : "50",
     "mapreduce.tasktracker.dns.nameserver" : "default",
     "mapreduce.map.output.compress" : "true",
-    "dfs.datanode.ipc.address" : "0.0.0.0:50020",
+    "dfs.datanode.ipc.address" : "0.0.0.0:9867",
     "hadoop.ssl.require.client.cert" : "false",
     "yarn.nodemanager.delete.debug-delay-sec" : "0",
     "dfs.datanode.max.transfer.threads" : "4096"
@@ -10255,7 +10255,7 @@
 "s3.blocksize" : "67108864",
 "mapreduce.task.io.sort.factor" : "10",
 "yarn.nodemanager.disk-health-checker.interval-ms" : "120000",
-"mapreduce.job.working.dir" : "hdfs://a2115.smile.com:8020/user/jenkins",
+"mapreduce.job.working.dir" : "hdfs://a2115.smile.com:9820/user/jenkins",
 "yarn.admin.acl" : "*",
 "mapreduce.job.speculative.speculativecap" : "0.1",
 "dfs.namenode.num.checkpoints.retained" : "2",
@@ -10291,7 +10291,7 @@
 "dfs.namenode.edits.dir" : "${dfs.namenode.name.dir}",
 "ha.health-monitor.sleep-after-disconnect.ms" : "1000",
 "dfs.encrypt.data.transfer" : "false",
-"dfs.datanode.http.address" : "0.0.0.0:50075",
+"dfs.datanode.http.address" : "0.0.0.0:9864",
 "mapreduce.terasort.num-rows" : "400000000",
 "mapreduce.job.map.class" : "org.apache.hadoop.examples.terasort.TeraGen$SortGenMapper",
 "mapreduce.jobtracker.jobhistory.task.numberprogresssplits" : "12",
@@ -10518,7 +10518,7 @@
 "ftp.stream-buffer-size" : "4096",
 "dfs.namenode.avoid.write.stale.datanode" : "false",
 "hadoop.security.group.mapping.ldap.search.attr.member" : "member",
-"mapreduce.output.fileoutputformat.outputdir" : "hdfs://a2115.smile.com:8020/user/jenkins/tera-gen-1",
+"mapreduce.output.fileoutputformat.outputdir" : "hdfs://a2115.smile.com:9820/user/jenkins/tera-gen-1",
 "dfs.blockreport.initialDelay" : "0",
 "yarn.nm.liveness-monitor.expiry-interval-ms" : "600000",
 "hadoop.http.authentication.token.validity" : "36000",
@@ -10544,7 +10544,7 @@
 "yarn.scheduler.maximum-allocation-mb" : "8192",
 "yarn.nodemanager.heartbeat.interval-ms" : "1000",
 "mapreduce.job.userlog.retain.hours" : "24",
-"dfs.namenode.secondary.http-address" : "0.0.0.0:50090",
+"dfs.namenode.secondary.http-address" : "0.0.0.0:9868",
 "mapreduce.task.timeout" : "600000",
 "mapreduce.framework.name" : "yarn",
 "ipc.client.idlethreshold" : "4000",
@@ -10564,7 +10564,7 @@
 "hadoop.security.auth_to_local" : "DEFAULT",
 "dfs.secondary.namenode.kerberos.internal.spnego.principal" : "${dfs.web.authentication.kerberos.principal}",
 "ftp.client-write-packet-size" : "65536",
-"fs.defaultFS" : "hdfs://a2115.smile.com:8020",
+"fs.defaultFS" : "hdfs://a2115.smile.com:9820",
 "yarn.nodemanager.address" : "0.0.0.0:0",
 "yarn.scheduler.fair.assignmultiple" : "true",
 "yarn.resourcemanager.scheduler.client.thread-count" : "50",
@@ -10608,9 +10608,9 @@
 "fs.s3n.block.size" : "67108864",
 "fs.ftp.host" : "0.0.0.0",
 "hadoop.security.group.mapping" : "org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback",
-"dfs.datanode.address" : "0.0.0.0:50010",
+"dfs.datanode.address" : "0.0.0.0:9866",
 "mapreduce.map.skip.maxrecords" : "0",
-"dfs.datanode.https.address" : "0.0.0.0:50475",
+"dfs.datanode.https.address" : "0.0.0.0:9865",
 "file.replication" : "1",
 "yarn.resourcemanager.resource-tracker.address" : "a2115.smile.com:8031",
 "dfs.datanode.drop.cache.behind.reads" : "false",
@@ -10677,7 +10677,7 @@
 "yarn.resourcemanager.resource-tracker.client.thread-count" : "50",
 "mapreduce.tasktracker.dns.nameserver" : "default",
 "mapreduce.map.output.compress" : "true",
-"dfs.datanode.ipc.address" : "0.0.0.0:50020",
+"dfs.datanode.ipc.address" : "0.0.0.0:9867",
 "hadoop.ssl.require.client.cert" : "false",
 "yarn.nodemanager.delete.debug-delay-sec" : "0",
 "dfs.datanode.max.transfer.threads" : "4096"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/63e5412f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/AddressTypes.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/AddressTypes.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/AddressTypes.java
index 36dbf0c..61298e6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/AddressTypes.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/AddressTypes.java
@@ -34,7 +34,7 @@ public interface AddressTypes {
    * The host/domain name and port are set as separate strings in the address
    * list, e.g.
    * <pre>
-   *   ["namenode.example.org", "50070"]
+   *   ["namenode.example.org", "9870"]
    * </pre>
    */
   public static final String ADDRESS_HOSTNAME_AND_PORT = "host/port";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/63e5412f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java
index 91602e1..cd877b2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java
@@ -147,7 +147,7 @@ public class RegistryTestHelper extends Assert {
     Map<String, String> url = addressList.get(0);
     String addr = url.get("uri");
     assertTrue(addr.contains("http"));
-    assertTrue(addr.contains(":8020"));
+    assertTrue(addr.contains(":9820"));
 
     Endpoint nnipc = findEndpoint(record, NNIPC, false, 1,2);
     assertEquals("wrong protocol in " + nnipc, ProtocolTypes.PROTOCOL_THRIFT,
@@ -275,7 +275,7 @@ public class RegistryTestHelper extends Assert {
         new URI("http", hostname + ":80", "/")));
     entry.addExternalEndpoint(
         restEndpoint(API_WEBHDFS,
-            new URI("http", hostname + ":8020", "/")));
+            new URI("http", hostname + ":9820", "/")));
 
     Endpoint endpoint = ipcEndpoint(API_HDFS, null);
     endpoint.addresses.add(RegistryTypeUtils.hostnamePortPair(hostname, 8030));
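
The AddressTypes javadoc change earlier and this helper share the ["hostname", "port"] convention; a minimal sketch using only calls visible in this file (hostname per the updated javadoc example, NameNode IPC port per this patch):

    // Sketch: attaching a host/port address pair to an IPC endpoint.
    Endpoint endpoint = ipcEndpoint(API_HDFS, null);
    endpoint.addresses.add(
        RegistryTypeUtils.hostnamePortPair("namenode.example.org", 9820));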

http://git-wip-us.apache.org/repos/asf/hadoop/blob/63e5412f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java
index ce7e388..03937ea 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java
@@ -63,7 +63,7 @@ public class TestPBRecordImpl {
     LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
     assertTrue(ret instanceof LocalResourcePBImpl);
     ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path(
-      "hdfs://y.ak:8020/foo/bar")));
+      "hdfs://y.ak:9820/foo/bar")));
     ret.setSize(4344L);
     ret.setTimestamp(3141592653589793L);
     ret.setVisibility(LocalResourceVisibility.PUBLIC);

