hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cutt...@apache.org
Subject svn commit: r510275 - in /lucene/hadoop/trunk: ./ src/contrib/streaming/src/test/org/apache/hadoop/streaming/ src/java/org/apache/hadoop/filecache/ src/java/org/apache/hadoop/tools/ src/java/org/apache/hadoop/util/ src/test/org/apache/hadoop/mapred/
Date Wed, 21 Feb 2007 22:35:54 GMT
Author: cutting
Date: Wed Feb 21 14:35:53 2007
New Revision: 510275

URL: http://svn.apache.org/viewvc?view=rev&rev=510275
Log:
HADOOP-564.  Replace uses of dfs:// with hdfs://.  Contributed by Wendy.

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/filecache/DistributedCache.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MRCaching.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=510275&r1=510274&r2=510275
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Wed Feb 21 14:35:53 2007
@@ -102,6 +102,9 @@
 30. HADOOP-990.  Improve HDFS support for full datanode volumes.
     (Raghu Angadi via cutting)
 
+31. HADOOP-564.  Replace uses of "dfs://" URIs with the more standard
+    "hdfs://".  (Wendy Chien via cutting)
+
 
 Release 0.11.2 - 2007-02-16
 

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java?view=diff&rev=510275&r1=510274&r2=510275
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java Wed Feb 21 14:35:53 2007
@@ -76,7 +76,7 @@
             "-jobconf", strNamenode,
             "-jobconf", strJobtracker,
             "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
-            "-cacheFile", "dfs://"+fileSys.getName()+CACHE_FILE + "#testlink"
+            "-cacheFile", "hdfs://"+fileSys.getName()+CACHE_FILE + "#testlink"
         };
 
         fileSys.delete(new Path(OUTPUT_DIR));

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/filecache/DistributedCache.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/filecache/DistributedCache.java?view=diff&rev=510275&r1=510274&r2=510275
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/filecache/DistributedCache.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/filecache/DistributedCache.java Wed Feb 21 14:35:53 2007
@@ -44,7 +44,7 @@
   /**
    * 
    * @param cache the cache to be localized, this should be specified as 
-   * new URI(dfs://hostname:port/absoulte_path_to_file#LINKNAME). If no schema 
+   * new URI(hdfs://hostname:port/absoulte_path_to_file#LINKNAME). If no schema 
    * or hostname:port is provided the file is assumed to be in the filesystem
    * being used in the Configuration
    * @param conf The Confguration file which contains the filesystem
@@ -137,7 +137,7 @@
   /*
    * Returns the relative path of the dir this cache will be localized in
    * relative path that this cache will be localized in. For
-   * dfs://hostname:port/absolute_path -- the relative path is
+   * hdfs://hostname:port/absolute_path -- the relative path is
    * hostname/absolute path -- if it is just /absolute_path -- then the
    * relative path is hostname of DFS this mapred cluster is running
    * on/absolute_path
@@ -147,7 +147,7 @@
     String fsname = cache.getScheme();
     String path;
     FileSystem dfs = FileSystem.get(conf);
-    if ("dfs".equals(fsname)) {
+    if ("hdfs".equals(fsname)) {
       path = cache.getHost() + cache.getPath();
     } else {
       String[] split = dfs.getName().split(":");
@@ -348,7 +348,7 @@
   
   private static String getFileSysName(URI url) {
     String fsname = url.getScheme();
-    if ("dfs".equals(fsname)) {
+    if ("hdfs".equals(fsname)) {
       String host = url.getHost();
       int port = url.getPort();
       return (port == (-1)) ? host : (host + ":" + port);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java?view=diff&rev=510275&r1=510274&r2=510275
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java Wed Feb 21 14:35:53 2007
@@ -177,7 +177,7 @@
   doArchive(String logListURI, String archiveDirectory)
   throws IOException
   {
-    String destURL = new String("dfs://" + fsConfig.get("fs.default.name", "local") + 
+    String destURL = new String("hdfs://" + fsConfig.get("fs.default.name", "local") + 
         archiveDirectory);
     CopyFiles.copy(fsConfig, logListURI, destURL, true, false);
   }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java?view=diff&rev=510275&r1=510274&r2=510275
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java Wed Feb 21 14:35:53 2007
@@ -671,9 +671,8 @@
     ArrayList<String> protocolURIs = new ArrayList<String>(uris.length);
     
     for(int i=0; i < uris.length; ++i) {
-      // uri must start w/ protocol or if protocol is dfs, allow hdfs as alias.
-      if(uris[i].startsWith(protocol) || 
-          (protocol.equalsIgnoreCase("dfs") && uris[i].startsWith("hdfs"))) {
+      // uri must start w/ protocol 
+      if(uris[i].startsWith(protocol)) {
         protocolURIs.add(uris[i]);
       }
     }
@@ -720,8 +719,8 @@
       //Source paths
       srcPaths = fetchSrcURIs(conf, srcURI);  
       
-      // Protocol - 'dfs://'
-      String[] dfsUrls = parseInputFile("dfs", srcPaths);
+      // Protocol - 'hdfs://'
+      String[] dfsUrls = parseInputFile(HDFS, srcPaths);
       if(dfsUrls != null) {
         for(int i=0; i < dfsUrls.length; ++i) {
           copy(conf, dfsUrls[i], destPath, false, ignoreReadFailures);

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MRCaching.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MRCaching.java?view=diff&rev=510275&r1=510274&r2=510275
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MRCaching.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MRCaching.java Wed Feb 21 14:35:53 2007
@@ -181,9 +181,9 @@
       archive2 = "file://" + cachePath + "/test.zip";
       file1 = "file://" + cachePath + "/test.txt";
     } else {
-      archive1 = "dfs://" + fileSys + cachePath + "/test.jar";
-      archive2 = "dfs://" + fileSys + cachePath + "/test.zip";
-      file1 = "dfs://" + fileSys + cachePath + "/test.txt";
+      archive1 = "hdfs://" + fileSys + cachePath + "/test.jar";
+      archive2 = "hdfs://" + fileSys + cachePath + "/test.zip";
+      file1 = "hdfs://" + fileSys + cachePath + "/test.txt";
     }
     URI uri1 = null;
     URI uri2 = null;



Mime
View raw message