hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From sur...@apache.org
Subject svn commit: r1417943 - in /hadoop/common/branches/branch-1-win: ./ src/contrib/streaming/src/java/org/apache/hadoop/streaming/ src/core/org/apache/hadoop/util/ src/mapred/org/apache/hadoop/mapred/ src/test/org/apache/hadoop/util/
Date Thu, 06 Dec 2012 15:27:10 GMT
Author: suresh
Date: Thu Dec  6 15:27:09 2012
New Revision: 1417943

URL: http://svn.apache.org/viewvc?rev=1417943&view=rev
Log:
HADOOP-8645. HADOOP_HOME and -Dhadoop.home (from hadoop wrapper script) are not uniformly
handled. Contributed by John Gordon.

Modified:
    hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt
    hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/Shell.java
    hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java
    hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskRunner.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/util/TestWinUtils.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt?rev=1417943&r1=1417942&r2=1417943&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt Thu Dec  6 15:27:09 2012
@@ -240,3 +240,6 @@ Branch-hadoop-1-win - unreleased
 
     HADOOP-9061. Java6+Windows does not work well with symlinks.
     (Ivan Mitic via suresh)
+
+    HADOOP-8645. HADOOP_HOME and -Dhadoop.home (from hadoop wrapper script) are
+    not uniformly handled. (John Gordon via suresh)

Modified: hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java?rev=1417943&r1=1417942&r2=1417943&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ hadoop/common/branches/branch-1-win/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java Thu Dec  6 15:27:09 2012
@@ -69,6 +69,7 @@ import org.apache.hadoop.util.GenericOpt
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.Shell;
 
 /** All the client-side work happens here.
  * (Jar packaging, MapRed job submission and monitoring)
@@ -552,10 +553,13 @@ public class StreamJob implements Tool {
   // --------------------------------------------
 
   protected String getHadoopClientHome() {
-    String h = env_.getProperty("HADOOP_HOME"); // standard Hadoop
-    if (h == null) {
-      //fail("Missing required environment variable: HADOOP_HOME");
-      h = "UNDEF";
+    String h = "UNDEF";
+
+    try {
+      h = Shell.getHadoopHome();
+    } catch (IOException e) {
+      LOG.warn("Missing required environment setting: HADOOP_HOME"
+        + " or hadoop.home.dir:" + e);
     }
     return h;
   }

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/Shell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/Shell.java?rev=1417943&r1=1417942&r2=1417943&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/Shell.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/Shell.java Thu Dec  6 15:27:09 2012
@@ -48,9 +48,112 @@ abstract public class Shell {
     return IS_JAVA7_OR_ABOVE;
   }
 
+
+  /** Centralized logic to discover and validate the sanity of the Hadoop 
+   *  home directory. Returns either NULL or a directory that exists and 
+   *  was specified via either -Dhadoop.home.dir or the HADOOP_HOME ENV 
+   *  variable.  This does a lot of work so it should only be called 
+   *  privately for initialization once per process.
+   **/
+  private static String checkHadoopHome() {
+
+    // first check the Dflag hadoop.home.dir with JVM scope
+    String home = System.getProperty("hadoop.home.dir");
+
+    // fall back to the system/user-global env variable
+    if (home == null) {
+      home = System.getenv("HADOOP_HOME");
+    }
+
+    try {
+       // couldn't find either setting for hadoop's home directory
+       if (home == null) {
+         throw new IOException("HADOOP_HOME or hadoop.home.dir are not set.");
+       }
+
+       if (home.startsWith("\"") && home.endsWith("\"")) {
+         home = home.substring(1, home.length()-1);
+       }
+
+       // check that the home setting is actually a directory that exists
+       File homedir = new File(home);
+       if (!homedir.isAbsolute() || !homedir.exists() || !homedir.isDirectory()) {
+         throw new IOException("Hadoop home directory " + homedir
+           + " does not exist, is not a directory, or is not an absolute path.");
+       }
+
+       home = homedir.getCanonicalPath();
+
+    } catch (IOException ioe) {
+       LOG.error("Failed to detect a valid hadoop home directory", ioe);
+       home = null;
+    }
+    
+    return home;
+  }
+  private static String HADOOP_HOME_DIR = checkHadoopHome();
+
+  // Public getter, throws an exception if HADOOP_HOME failed validation
+  // checks and is being referenced downstream.
+  public static final String getHadoopHome() throws IOException {
+    if (HADOOP_HOME_DIR == null) {
+      throw new IOException("Misconfigured HADOOP_HOME cannot be referenced.");
+    }
+
+    return HADOOP_HOME_DIR;
+  }
+
+  /** fully qualify the path to a binary that should be in a known hadoop 
+   *  bin location. This is primarily useful for disambiguating call-outs 
+   *  to executable sub-components of Hadoop to avoid clashes with other 
+   *  executables that may be in the path.  Caveat:  this call doesn't 
+   *  just format the path to the bin directory.  It also checks for file 
+   *  existence of the composed path. The output of this call should be 
+   *  cached by callers.
+   * */
+  public static final String getQualifiedBinPath(String executable) 
+  throws IOException {
+    // construct hadoop bin path to the specified executable
+    String fullExeName = HADOOP_HOME_DIR + File.separator + "bin" 
+      + File.separator + executable;
+
+    File exeFile = new File(fullExeName);
+    if (!exeFile.exists()) {
+      throw new IOException("Could not locate executable " + fullExeName
+        + " in the Hadoop binaries.");
+    }
+
+    return exeFile.getCanonicalPath();
+  }
+
+  /** Set to true on Windows platforms */
+  public static final boolean WINDOWS
+                = System.getProperty("os.name").startsWith("Windows");
+  
+  public static final boolean LINUX
+                = System.getProperty("os.name").startsWith("Linux");
+
+  /* Set flag for aiding Windows porting temporarily for branch-1-win*/
+  // TODO - this needs to be fixed
+  public static final boolean DISABLEWINDOWS_TEMPORARILY = WINDOWS; 
+  
   /** a Windows utility to emulate Unix commands */
-  public static final String WINUTILS = System.getenv("HADOOP_HOME")
-                                        + "\\bin\\winutils";
+  public static final String WINUTILS = getWinUtilsPath();
+
+  public static final String getWinUtilsPath() {
+    String winUtilsPath = null;
+
+    try {
+      if (WINDOWS) {
+        winUtilsPath = getQualifiedBinPath("winutils.exe");
+      }
+    } catch (IOException ioe) {
+       LOG.error("Failed to locate the winutils binary in the hadoop binary path",
+         ioe);
+    }
+
+    return winUtilsPath;
+  }
 
   /** a Unix command to get the current user's name */
   public final static String USER_NAME_COMMAND = "whoami";
@@ -208,22 +311,11 @@ abstract public class Shell {
     
     return getUlimitMemoryCommand(memoryLimit);
   }
-  
-  /** Set to true on Windows platforms */
-  public static final boolean WINDOWS
-                = System.getProperty("os.name").startsWith("Windows");
-  
-  public static final boolean LINUX
-                = System.getProperty("os.name").startsWith("Linux");
 
   /** Token separator regex used to parse Shell tool outputs */
   public static final String TOKEN_SEPARATOR_REGEX
                 = WINDOWS ? "[|\n\r]" : "[ \t\n\r\f]";
 
-  /* Set flag for aiding Windows porting temporarily for branch-1-win*/
-  // TODO - this needs to be fixed
-  public static final boolean DISABLEWINDOWS_TEMPORARILY = WINDOWS; 
-  
   private long    interval;   // refresh interval in msec
   private long    lastTime;   // last time the command was performed
   private Map<String, String> environment; // env for the command execution
@@ -569,7 +661,7 @@ abstract public class Shell {
    * @return the output of the executed command.
    */
   public static String execCommand(Map<String,String> env, String ... cmd) 
-  throws IOException {
+      throws IOException {
     return execCommand(env, cmd, 0L);
   }
   

Modified: hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java?rev=1417943&r1=1417942&r2=1417943&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java (original)
+++ hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java Thu Dec  6 15:27:09 2012
@@ -38,6 +38,7 @@ import org.apache.hadoop.util.ProcessTre
 import org.apache.hadoop.util.Shell.ExitCodeException;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Shell;
 
 /**
  * A {@link TaskController} that runs the task JVMs as the user 
@@ -73,9 +74,14 @@ class LinuxTaskController extends TaskCo
   @Override
   public void setConf(Configuration conf) {
     super.setConf(conf);
-    File hadoopBin = new File(System.getenv("HADOOP_HOME"), "bin");
-    String defaultTaskController = 
-        new File(hadoopBin, "task-controller").getAbsolutePath();
+
+    String defaultTaskController = null;
+    try {
+      defaultTaskController = Shell.getQualifiedBinPath("task-controller");
+    } catch (IOException ioe) {
+      LOG.warn("Could not locate the default native taskcontroller"+ioe);
+    }
+
     taskControllerExe = conf.get(TASK_CONTROLLER_EXEC_KEY, 
                                  defaultTaskController);       
   }

Modified: hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskRunner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskRunner.java?rev=1417943&r1=1417942&r2=1417943&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskRunner.java (original)
+++ hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskRunner.java Thu Dec  6 15:27:09 2012
@@ -77,6 +77,7 @@ abstract class TaskRunner extends Thread
       System.getenv(Shell.WINDOWS ? "USERPROFILE" : "HOME");
   
   static final String HADOOP_WORK_DIR = "HADOOP_WORK_DIR";
+  static final String HADOOP_HOME_DIR = "HADOOP_HOME";
   
   static final String MAPRED_ADMIN_USER_ENV =
     "mapreduce.admin.user.env";
@@ -584,6 +585,16 @@ abstract class TaskRunner extends Thread
     }
     env.put("LD_LIBRARY_PATH", ldLibraryPath.toString());
     env.put(HADOOP_WORK_DIR, workDir.toString());
+
+    try {
+      // When launching tasks, the child may rely on HADOOP_HOME
+      // Make sure it is set even when home is set by 
+      // the -Dhadoop.home.dir flag.
+      env.put(HADOOP_HOME_DIR, Shell.getHadoopHome());
+    } catch (IOException ioe) {
+      LOG.warn("Failed to propagate HADOOP_HOME_DIR to child ENV " + ioe);
+    }
+
     //update user configured login-shell properties
     updateUserLoginEnv(errorInfo, user, conf, env);
     // put jobTokenFile name into env

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/util/TestWinUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/util/TestWinUtils.java?rev=1417943&r1=1417942&r2=1417943&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/util/TestWinUtils.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/util/TestWinUtils.java Thu Dec  6 15:27:09 2012
@@ -24,6 +24,8 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
+
+import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileUtil;
@@ -267,10 +269,9 @@ public class TestWinUtils {
     // - Change mode to 677 so owner does not have execute permission.
     // - Verify the owner truly does not have the permissions to execute the file.
 
-    File winutilsFile = new File(Shell.WINUTILS + ".exe");
+    File winutilsFile = new File(Shell.WINUTILS);
     File aExe = new File(TEST_DIR, "a.exe");
-    Shell.execCommand("cmd", "/c", "copy", winutilsFile.getCanonicalPath(),
-      aExe.getCanonicalPath());
+    FileUtils.copyFile(winutilsFile, aExe);
     chmod("677", aExe);
 
     try {



Mime
View raw message