hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From omal...@apache.org
Subject svn commit: r1128394 [2/2] - in /hadoop/mapreduce/trunk: ./ bin/ ivy/ src/benchmarks/gridmix/ src/benchmarks/gridmix/javasort/ src/benchmarks/gridmix/maxent/ src/benchmarks/gridmix/monsterQuery/ src/benchmarks/gridmix/pipesort/ src/benchmarks/gridmix/s...
Date Fri, 27 May 2011 17:03:26 GMT
Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/LoadTypedBytes.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/LoadTypedBytes.java?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/LoadTypedBytes.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/LoadTypedBytes.java Fri May 27 17:03:23 2011
@@ -89,7 +89,7 @@ public class LoadTypedBytes implements T
   }
 
   private void printUsage() {
-    System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar"
+    System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar"
         + " loadtb <path>");
     System.out.println("  Reads typed bytes from standard input" +
     " and stores them in a sequence file in");

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java Fri May 27 17:03:23 2011
@@ -476,7 +476,7 @@ public class StreamJob implements Tool {
   }
 
   private void printUsage(boolean detailed) {
-    System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar"
+    System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar"
         + " [options]");
     System.out.println("Options:");
     System.out.println("  -input          <path> DFS input file(s) for the Map" 
@@ -525,7 +525,7 @@ public class StreamJob implements Tool {
       System.out.println();      
       System.out.println("For more details about these options:");
       System.out.println("Use " +
-          "$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar -info");
+          "$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar -info");
       return;
     }
     System.out.println();
@@ -585,7 +585,7 @@ public class StreamJob implements Tool {
     System.out.println("  -D stream.non.zero.exit.is.failure=false");
     System.out.println("Use a custom hadoop streaming build along with standard"
         + " hadoop install:");
-    System.out.println("  $HADOOP_HOME/bin/hadoop jar " +
+    System.out.println("  $HADOOP_PREFIX/bin/hadoop jar " +
         "/path/my-hadoop-streaming.jar [...]\\");
     System.out.println("    [...] -D stream.shipped.hadoopstreaming=" +
         "/path/my-hadoop-streaming.jar");
@@ -596,7 +596,7 @@ public class StreamJob implements Tool {
     System.out.println("   -cmdenv EXAMPLE_DIR=/home/example/dictionaries/");
     System.out.println();
     System.out.println("Shortcut:");
-    System.out.println("   setenv HSTREAMING \"$HADOOP_HOME/bin/hadoop jar " +
+    System.out.println("   setenv HSTREAMING \"$HADOOP_PREFIX/bin/hadoop jar " +
         "hadoop-streaming.jar\"");
     System.out.println();
     System.out.println("Example: $HSTREAMING -mapper " +
@@ -619,9 +619,9 @@ public class StreamJob implements Tool {
   // --------------------------------------------
 
   protected String getHadoopClientHome() {
-    String h = env_.getProperty("HADOOP_HOME"); // standard Hadoop
+    String h = env_.getProperty("HADOOP_PREFIX"); // standard Hadoop
     if (h == null) {
-      //fail("Missing required environment variable: HADOOP_HOME");
+      //fail("Missing required environment variable: HADOOP_PREFIX");
       h = "UNDEF";
     }
     return h;
@@ -645,8 +645,8 @@ public class StreamJob implements Tool {
     // usually found in: build/contrib or build/hadoop-<version>-dev-streaming.jar
 
     // First try an explicit spec: it's too hard to find our own location in this case:
-    // $HADOOP_HOME/bin/hadoop jar /not/first/on/classpath/custom-hadoop-streaming.jar
-    // where findInClasspath() would find the version of hadoop-streaming.jar in $HADOOP_HOME
+    // $HADOOP_PREFIX/bin/hadoop jar /not/first/on/classpath/custom-hadoop-streaming.jar
+    // where findInClasspath() would find the version of hadoop-streaming.jar in $HADOOP_PREFIX
     String runtimeClasses = config_.get("stream.shipped.hadoopstreaming"); // jar or class dir
     
     if (runtimeClasses == null) {

Modified: hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java Fri May 27 17:03:23 2011
@@ -94,7 +94,7 @@ public class PostExPerformanceDiagnoser 
    * @param jobConfFile - URL pointing to job configuration (job_conf.xml) file
    * @param jobHistoryFile - URL pointing to job history log file  
    * @param testsConfFileIs - file path for test configuration file (optional). 
-   * If not specified default path is:$HADOOP_HOME/contrib/vaidya/pxpd_tests_config.xml
+   * If not specified default path is:$HADOOP_PREFIX/contrib/vaidya/pxpd_tests_config.xml
    * @param reportFile - file path for storing report (optional)
    */
   public PostExPerformanceDiagnoser (String jobConfFile, String jobHistoryFile, InputStream testsConfFileIs,
@@ -168,7 +168,7 @@ public class PostExPerformanceDiagnoser 
    System.out.println("                       : on local file system and be specified as an absolute file path.");
     System.out.println("                       : e.g. => /Users/hadoop-user/postex_diagnosis_tests.xml. If not specified default file will be used");
     System.out.println("                       : from the hadoop-{ver}-vaidya.jar in a classpath.");
-    System.out.println("                       : For user to view or make local copy of default tests, file is available at $HADOOP_HOME/contrib/vaidya/conf/postex_diagnosis_tests.xml");
+    System.out.println("                       : For user to view or make local copy of default tests, file is available at $HADOOP_PREFIX/contrib/vaidya/conf/postex_diagnosis_tests.xml");
     System.out.println();
    System.out.println("-report <filepath>     : Optional file path for storing diagnostic report in an XML format. Path should be available");
    System.out.println("                       : on local file system and be specified as an absolute file path.");

Modified: hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/vaidya.sh
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/vaidya.sh?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/vaidya.sh (original)
+++ hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/vaidya.sh Fri May 27 17:03:23 2011
@@ -31,9 +31,9 @@ script=`basename "$this"`
 bin=`cd "$bin"; pwd`
 this="$bin/$script"
 
-# Check if HADOOP_HOME AND JAVA_HOME is set.
-if [ -z $HADOOP_HOME ] ; then
-  echo "HADOOP_HOME environment variable not defined"
+# Check if HADOOP_PREFIX and JAVA_HOME are set.
+if [ -z $HADOOP_PREFIX ] ; then
+  echo "HADOOP_PREFIX environment variable not defined"
   exit -1;
 fi
 
@@ -42,6 +42,6 @@ if [ -z $JAVA_HOME ] ; then
   exit -1;
 fi
 
-hadoopVersion=`$HADOOP_HOME/bin/hadoop version | grep Hadoop | awk '{print $2}'`
+hadoopVersion=`$HADOOP_PREFIX/bin/hadoop version | grep Hadoop | awk '{print $2}'`
 
-$JAVA_HOME/bin/java -Xmx1024m -classpath $HADOOP_HOME/hadoop-${hadoopVersion}-core.jar:$HADOOP_HOME/contrib/vaidya/hadoop-${hadoopVersion}-vaidya.jar:$HADOOP_HOME/lib/commons-logging-1.0.4.jar:${CLASSPATH} org.apache.hadoop.vaidya.postexdiagnosis.PostExPerformanceDiagnoser $@
+$JAVA_HOME/bin/java -Xmx1024m -classpath $HADOOP_PREFIX/hadoop-${hadoopVersion}-core.jar:$HADOOP_PREFIX/contrib/vaidya/hadoop-${hadoopVersion}-vaidya.jar:$HADOOP_PREFIX/lib/commons-logging-1.0.4.jar:${CLASSPATH} org.apache.hadoop.vaidya.postexdiagnosis.PostExPerformanceDiagnoser $@

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml Fri May 27 17:03:23 2011
@@ -192,7 +192,7 @@
         <p>To run the Capacity Scheduler in your Hadoop installation, you need 
         to put it on the <em>CLASSPATH</em>. The easiest way is to copy the 
         <code>hadoop-*-capacity-scheduler.jar</code> from 
-        to <code>HADOOP_HOME/lib</code>. Alternatively, you can modify 
+        to <code>HADOOP_PREFIX/lib</code>. Alternatively, you can modify 
         <em>HADOOP_CLASSPATH</em> to include this jar, in 
         <code>conf/hadoop-env.sh</code>.</p>
     </section>

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/fair_scheduler.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/fair_scheduler.xml?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/fair_scheduler.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/fair_scheduler.xml Fri May 27 17:03:23 2011
@@ -104,7 +104,7 @@
         To run the fair scheduler in your Hadoop installation, you need to put
         it on the CLASSPATH. The easiest way is to copy the 
         <em>hadoop-*-fairscheduler.jar</em> from
-        <em>HADOOP_HOME/build/contrib/fairscheduler</em> to <em>HADOOP_HOME/lib</em>.
+        <em>HADOOP_PREFIX/build/contrib/fairscheduler</em> to <em>HADOOP_PREFIX/lib</em>.
         Alternatively you can modify <em>HADOOP_CLASSPATH</em> to include this jar, in
         <em>HADOOP_CONF_DIR/hadoop-env.sh</em>
       </p>
@@ -127,7 +127,7 @@
       </p>
       <p>
         If you wish to compile the fair scheduler from source, run <em> ant 
-        package</em> in your HADOOP_HOME directory. This will build
+        package</em> in your HADOOP_PREFIX directory. This will build
         <em>build/contrib/fair-scheduler/hadoop-*-fairscheduler.jar</em>.
       </p>
     </section>
@@ -349,7 +349,7 @@
         Only users/pools whose values differ from the defaults need to be
         explicitly configured in this file.
         The allocation file is located in
-        <em>HADOOP_HOME/conf/fair-scheduler.xml</em>.
+        <em>HADOOP_PREFIX/conf/fair-scheduler.xml</em>.
         It can contain the following types of elements:
         </p>
         <ul>

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml Fri May 27 17:03:23 2011
@@ -320,14 +320,14 @@
       <section>
         <title>Usage</title>
         
-        <p>Assuming <code>HADOOP_HOME</code> is the root of the installation and 
+        <p>Assuming <code>HADOOP_PREFIX</code> is the root of the installation and 
         <code>HADOOP_VERSION</code> is the Hadoop version installed, compile 
         <code>WordCount.java</code> and create a jar:</p>
         <p>
           <code>$ mkdir wordcount_classes</code><br/>
           <code>
             $ javac -classpath
-            ${HADOOP_HOME}/hadoop-core-${HADOOP_VERSION}.jar:${HADOOP_HOME}/hadoop-mapred-${HADOOP_VERSION}.jar:${HADOOP_HOME}/hadoop-hdfs-${HADOOP_VERSION}.jar
+            ${HADOOP_PREFIX}/hadoop-core-${HADOOP_VERSION}.jar:${HADOOP_PREFIX}/hadoop-mapred-${HADOOP_VERSION}.jar:${HADOOP_PREFIX}/hadoop-hdfs-${HADOOP_VERSION}.jar
               -d wordcount_classes WordCount.java
           </code><br/>
           <code>$ jar -cvf /user/joe/wordcount.jar -C wordcount_classes/ .</code> 

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/rumen.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/rumen.xml?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/rumen.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/rumen.xml Fri May 27 17:03:23 2011
@@ -456,7 +456,7 @@
         <li><code>Jackson Core</code> (<code>jackson-core-asl-1.4.2.jar</code>)</li>
       </ul>
       
-      <note>One simple way to run Rumen is to use '$HADOOP_HOME/bin/hadoop jar' 
+      <note>One simple way to run Rumen is to use '$HADOOP_PREFIX/bin/hadoop jar' 
               option  to run it.
       </note>
     </section>

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/streaming.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/streaming.xml?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/streaming.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/streaming.xml Fri May 27 17:03:23 2011
@@ -34,7 +34,7 @@ Hadoop streaming is a utility that comes
 script as the mapper and/or the reducer. For example:
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar \
     -input myInputDirs \
     -output myOutputDir \
     -mapper cat \
@@ -81,7 +81,7 @@ with non-zero status are considered to b
 The general command line syntax is shown below. </p>
 <p><strong>Note:</strong> Be sure to place the generic options before the streaming options, otherwise the command will fail. 
 For an example, see <a href="streaming.html#Making+Archives+Available+to+Tasks">Making Archives Available to Tasks</a>.</p>
-<source>$HADOOP_HOME/bin/hadoop command [genericOptions] [streamingOptions]</source>
+<source>$HADOOP_PREFIX/bin/hadoop command [genericOptions] [streamingOptions]</source>
 
 <p>The Hadoop streaming command options are listed here:</p>
 <table>
@@ -110,7 +110,7 @@ For an example, see <a href="streaming.h
 <title>Specifying a Java Class as the Mapper/Reducer</title>
 <p>You can supply a Java class as the mapper and/or the reducer. </p>
 <source>
-$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar \
     -input myInputDirs \
     -output myOutputDir \
     -mapper org.apache.hadoop.mapred.lib.IdentityMapper \
@@ -124,7 +124,7 @@ $HADOOP_HOME/bin/hadoop jar hadoop-strea
 You can specify any executable as the mapper and/or the reducer. The executables do not need to pre-exist on the machines in the cluster; however, if they don't, you will need to use "-file" option to tell the framework to pack your executable files as a part of job submission. For example:
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar \
     -input myInputDirs \
     -output myOutputDir \
     -mapper myPythonScript.py \
@@ -138,7 +138,7 @@ The above example specifies a user defin
 In addition to executable files, you can also package other auxiliary files (such as dictionaries, configuration files, etc) that may be used by the mapper and/or the reducer. For example:
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar \
     -input myInputDirs \
     -output myOutputDir \
     -mapper myPythonScript.py \
@@ -215,7 +215,7 @@ where <code>[identifier]</code> can be <
 The general command line syntax is shown below. </p>
 <p><strong>Note:</strong> Be sure to place the generic options before the streaming options, otherwise the command will fail. 
 For an example, see <a href="streaming.html#Making+Archives+Available+to+Tasks">Making Archives Available to Tasks</a>.</p>
-<source>$HADOOP_HOME/bin/hadoop command [genericOptions] [streamingOptions]</source>
+<source>$HADOOP_PREFIX/bin/hadoop command [genericOptions] [streamingOptions]</source>
 
 <p>The Hadoop generic command options you can use with streaming are listed here:</p>
 <table>
@@ -281,7 +281,7 @@ To specify the number of reducers, for e
 -numReduceTasks as two or simply set mapreduce.job.reduces to two.
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar \
     -numReduceTasks 2 \
     -input myInputDirs \
     -output myOutputDir \
@@ -306,7 +306,7 @@ For example:
 </p>
 
 <source>
-$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar \
     -D stream.map.output.field.separator=. \
     -D stream.num.map.output.key.fields=4 \
     -input myInputDirs \
@@ -402,7 +402,7 @@ In this example, the input.txt file has 
 "cachedir.jar" is a symlink to the archived directory, which has the files "cache.txt" and "cache2.txt". 
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar \
                   -archives 'hdfs://hadoop-nn1.example.com/user/me/samples/cachefile/cachedir.jar' \  
                   -D mapreduce.job.maps=1 \
                   -D mapreduce.job.name="Experiment" \
@@ -459,7 +459,7 @@ framework to partition the map outputs b
 the whole keys. For example:
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar \
     -D stream.map.output.field.separator=. \
     -D stream.num.map.output.key.fields=4 \
     -D mapreduce.map.output.key.field.separator=. \
@@ -522,7 +522,7 @@ that is useful for many applications. Th
 provided by the Unix/GNU Sort. For example:
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar \
     -D mapreduce.job.output.key.comparator.class=org.apache.hadoop.mapred.lib.KeyFieldBasedComparator \
     -D stream.map.output.field.separator=. \
     -D stream.num.map.output.key.fields=4 \
@@ -578,7 +578,7 @@ aggregatable items by invoking the appro
 To use Aggregate, simply specify "-reducer aggregate":
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar \
     -numReduceTasks 12 \
     -input myInputDirs \
     -output myOutputDir \
@@ -622,7 +622,7 @@ Similarly, the reduce function defined i
 You can select an arbitrary list of fields as the reduce output key, and an arbitrary list of fields as the reduce output value. For example:
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar \
     -D mapreduce.map.output.key.field.separator=. \
     -D mapreduce.partition.keypartitioner.options=-k1,2 \
     -D mapreduce.fieldsel.data.field.separator=. \
@@ -812,7 +812,7 @@ bruce   70
 charlie 80
 dan     75
 
-$ c2='cut -f2'; $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar \
+$ c2='cut -f2'; $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar \
     -D mapreduce.job.name='Experiment'
     -input /user/me/samples/student_marks 
     -output /user/me/samples/student_out 

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/vaidya.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/vaidya.xml?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/vaidya.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/vaidya.xml Fri May 27 17:03:23 2011
@@ -46,7 +46,7 @@
       <p>Ensure that Hadoop is installed and configured. More details:</p> 
       <ul>
         <li>
-          Make sure HADOOP_HOME environment variable is set.
+          Make sure HADOOP_PREFIX environment variable is set.
         </li>
         <li>
           Make sure Java is installed and configured as a part of the Hadoop installation.
@@ -82,7 +82,7 @@
 			     a set of counters for each Map and Reduce task.</li>
 			<li> <em>Diagnostic Test/Rule</em>: This is a program logic that detects the inefficiency of M/R job based on the job statistics. The
 				 description of the Test is specified as an XML element (DiagnosticTest) in a test description file e.g. 
-				 default tests description file, <em>$HADOOP_HOME/contrib/vaidya/conf/postex_diagnosis_tests.xml</em>. The actual logic is coded as
+				 default tests description file, <em>$HADOOP_PREFIX/contrib/vaidya/conf/postex_diagnosis_tests.xml</em>. The actual logic is coded as
 				 a java class and referenced in the DiagnosticTest XML element. </li>
 		</ul>
 	<p></p>
@@ -122,18 +122,18 @@
 	<section>
 		<title>How to Execute the Hadoop Vaidya Tool</title>
 		  
-      	<p>Script to execute Hadoop Vaidya is in <code>$HADOOP_HOME/contrib/vaidya/bin/</code> directory.
+      	<p>Script to execute Hadoop Vaidya is in <code>$HADOOP_PREFIX/contrib/vaidya/bin/</code> directory.
 		   It comes with a default set of rules defined in file: 
-           <code>$HADOOP_HOME/contrib/vaidya/conf/postex_diagnosis_tests.xml</code> </p>
+           <code>$HADOOP_PREFIX/contrib/vaidya/conf/postex_diagnosis_tests.xml</code> </p>
 		  <ul>
-			<li>Make sure HADOOP_HOME environment variable is set and Java is installed and configured.</li>
+			<li>Make sure HADOOP_PREFIX environment variable is set and Java is installed and configured.</li>
 			<li>Execute the Hadoop Vaidya script with -help (or without any arguments) to get the command line help. e.g. 
-                       <code>=>sh $HADOOP_HOME/contrib/vaidya/bin/vaidya.sh -help</code></li>
+                       <code>=>sh $HADOOP_PREFIX/contrib/vaidya/bin/vaidya.sh -help</code></li>
 			<li>User needs to 
 				 supply job's configuration file (<code>-jobconf job_conf.xml</code>), job history log file (<code>-joblog job_history_log_file</code>), and optionally the test description
-				 file (<code>-testconf postex_diagonostic_tests.xml</code>). If test description file is not specified then the default one is picked up from the Hadoop Vaidya Jar (<code>$HADOOP_HOME/contrib/vaidya/hadoop-{version}-vaidya.jar</code>).
+				 file (<code>-testconf postex_diagonostic_tests.xml</code>). If test description file is not specified then the default one is picked up from the Hadoop Vaidya Jar (<code>$HADOOP_PREFIX/contrib/vaidya/hadoop-{version}-vaidya.jar</code>).
 				 This default test description file is also available at following location for users to make a local copy, modify and add new test rules: 
-			     <code>$HADOOP_HOME/contrib/vaidya/conf/postex_diagnostic_tests.xml</code></li>
+			     <code>$HADOOP_PREFIX/contrib/vaidya/conf/postex_diagnostic_tests.xml</code></li>
 			<li> Use <code>-report report_file</code> option to store the xml report into specified report_file. </li>  
 		 </ul>
 	</section>
@@ -152,11 +152,11 @@
  				<li> getReferenceDetails() </li> 
               </ul>
           </li>
-		  <li>Make a local copy of the <code>$HADOOP_HOME/contrib/vaidya/conf/postex_diagnostic_tests.xml</code> file or create a new test description XML file.</li>
+		  <li>Make a local copy of the <code>$HADOOP_PREFIX/contrib/vaidya/conf/postex_diagnostic_tests.xml</code> file or create a new test description XML file.</li>
 		  <li>Add the test description element for your new test case to this test description file.</li>
 		  <li>Compile your new test class (or multiple classes), archive them into a Jar file and add it to the CLASSPATH e.g. (<code>export CLASSPATH=$CLASSPATH:newtests.jar</code>)</li>
 		  <li>Execute the Hadoop Vaidya script with the job configuration, job history log and reference to newly created test description file using <em>--testconf</em> option. 
-		  <code>=>sh $HADOOP_HOME/contrib/vaidya/bin/vaidya.sh -joblog job_history_log_file -jobconf job.xml -testconf new_test_description_file -report report.xml</code></li>
+		  <code>=>sh $HADOOP_PREFIX/contrib/vaidya/bin/vaidya.sh -joblog job_history_log_file -jobconf job.xml -testconf new_test_description_file -report report.xml</code></li>
 		</ul>
 	</section>
 	

Modified: hadoop/mapreduce/trunk/src/examples/python/compile
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/examples/python/compile?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/examples/python/compile (original)
+++ hadoop/mapreduce/trunk/src/examples/python/compile Fri May 27 17:03:23 2011
@@ -12,19 +12,19 @@
 #   limitations under the License.
 
 
-export HADOOP_HOME=../../..
+export HADOOP_PREFIX=../../..
 
-export CLASSPATH="$HADOOP_HOME/build/classes"
+export CLASSPATH="$HADOOP_PREFIX/build/classes"
 
 # so that filenames w/ spaces are handled correctly in loops below
 IFS=
 
 # add libs to CLASSPATH
-for f in $HADOOP_HOME/lib/*.jar; do
+for f in $HADOOP_PREFIX/lib/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 
-for f in $HADOOP_HOME/lib/jetty-ext/*.jar; do
+for f in $HADOOP_PREFIX/lib/jetty-ext/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 

Modified: hadoop/mapreduce/trunk/src/examples/python/pyAbacus/compile
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/examples/python/pyAbacus/compile?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/examples/python/pyAbacus/compile (original)
+++ hadoop/mapreduce/trunk/src/examples/python/pyAbacus/compile Fri May 27 17:03:23 2011
@@ -12,20 +12,20 @@
 #   limitations under the License.
 
 
-export HADOOP_HOME=../../../../..
+export HADOOP_PREFIX=../../../../..
 
-export CLASSPATH="$HADOOP_HOME/build/classes"
-export CLASSPATH=${CLASSPATH}:"$HADOOP_HOME/build/contrib/abacus/classes"
+export CLASSPATH="$HADOOP_PREFIX/build/classes"
+export CLASSPATH=${CLASSPATH}:"$HADOOP_PREFIX/build/contrib/abacus/classes"
 
 # so that filenames w/ spaces are handled correctly in loops below
 IFS=
 
 # add libs to CLASSPATH
-for f in $HADOOP_HOME/lib/*.jar; do
+for f in $HADOOP_PREFIX/lib/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 
-for f in $HADOOP_HOME/lib/jetty-ext/*.jar; do
+for f in $HADOOP_PREFIX/lib/jetty-ext/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 
@@ -33,5 +33,5 @@ done
 unset IFS
 jythonc -p org.apache.hadoop.abacus.examples -d -j jwc.jar -c JythonAbacus.py JyAbacusWCPlugIN.py
 
-jar -uvf jwc.jar -C $HADOOP_HOME/build/contrib/abacus/classes .
+jar -uvf jwc.jar -C $HADOOP_PREFIX/build/contrib/abacus/classes .
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LinuxTaskController.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LinuxTaskController.java?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LinuxTaskController.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LinuxTaskController.java Fri May 27 17:03:23 2011
@@ -71,9 +71,9 @@ class LinuxTaskController extends TaskCo
   private static String taskControllerExe;
   
   static {
-    // the task-controller is expected to be under the $HADOOP_HOME/bin
+    // the task-controller is expected to be under the $HADOOP_PREFIX/bin
     // directory.
-    File hadoopBin = new File(System.getenv("HADOOP_HOME"), "bin");
+    File hadoopBin = new File(System.getenv("HADOOP_PREFIX"), "bin");
     taskControllerExe = 
         new File(hadoopBin, "task-controller").getAbsolutePath();
   }

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Submitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Submitter.java?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Submitter.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Submitter.java Fri May 27 17:03:23 2011
@@ -316,7 +316,7 @@ public class Submitter extends Configure
     if (exec.contains("#")) {
       DistributedCache.createSymlink(conf);
       // set default gdb commands for map and reduce task 
-      String defScript = "$HADOOP_HOME/src/c++/pipes/debug/pipes-default-script";
+      String defScript = "$HADOOP_PREFIX/src/c++/pipes/debug/pipes-default-script";
       setIfUnset(conf, MRJobConfig.MAP_DEBUG_SCRIPT,defScript);
       setIfUnset(conf, MRJobConfig.REDUCE_DEBUG_SCRIPT,defScript);
     }

Added: hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/conffile
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/conffile?rev=1128394&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/conffile (added)
+++ hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/conffile Fri May 27 17:03:23 2011
@@ -0,0 +1,15 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+/etc/hadoop

Added: hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/control
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/control?rev=1128394&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/control (added)
+++ hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/control Fri May 27 17:03:23 2011
@@ -0,0 +1,24 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+Package: hadoop-mapreduce
+Version: @version@
+Section: misc
+Priority: optional
+Provides: hadoop-mapreduce
+Architecture: all
+Depends: openjdk-6-jre-headless, hadoop-common, hadoop-hdfs
+Maintainer: Apache Software Foundation <general@hadoop.apache.org>
+Description: The Apache Hadoop project develops open-source software for reliable, scalable, distributed computing.
+Distribution: development

Added: hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/postinst
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/postinst?rev=1128394&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/postinst (added)
+++ hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/postinst Fri May 27 17:03:23 2011
@@ -0,0 +1,24 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bash /usr/sbin/update-mapred-env.sh \
+  --prefix=/usr \
+  --bin-dir=/usr/bin \
+  --sbin-dir=/usr/sbin \
+  --conf-dir=/etc/hadoop \
+  --log-dir=/var/log/hadoop \
+  --pid-dir=/var/run/hadoop

Added: hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/postrm
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/postrm?rev=1128394&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/postrm (added)
+++ hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/postrm Fri May 27 17:03:23 2011
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+/usr/sbin/groupdel hadoop 2> /dev/null >/dev/null
+exit 0

Added: hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/preinst
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/preinst?rev=1128394&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/preinst (added)
+++ hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/preinst Fri May 27 17:03:23 2011
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+/usr/sbin/useradd --comment "Hadoop MapReduce" --shell /bin/bash -M -r --groups hadoop --home /var/lib/hadoop/mapred mapred 2> /dev/null || :

Added: hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/prerm
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/prerm?rev=1128394&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/prerm (added)
+++ hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/prerm Fri May 27 17:03:23 2011
@@ -0,0 +1,25 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bash /usr/sbin/update-mapred-env.sh \
+  --prefix=/usr \
+  --bin-dir=/usr/bin \
+  --sbin-dir=/usr/sbin \
+  --conf-dir=/etc/hadoop \
+  --log-dir=/var/log/hadoop \
+  --pid-dir=/var/run/hadoop \
+  --uninstall

Added: hadoop/mapreduce/trunk/src/packages/deb/init.d/hadoop-jobtracker
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/packages/deb/init.d/hadoop-jobtracker?rev=1128394&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/packages/deb/init.d/hadoop-jobtracker (added)
+++ hadoop/mapreduce/trunk/src/packages/deb/init.d/hadoop-jobtracker Fri May 27 17:03:23 2011
@@ -0,0 +1,142 @@
+#! /bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+### BEGIN INIT INFO
+# Provides:		hadoop-jobtracker	
+# Required-Start:	$remote_fs $syslog
+# Required-Stop:	$remote_fs $syslog
+# Default-Start:	2 3 4 5
+# Default-Stop:		
+# Short-Description:	Apache Hadoop Job Tracker server
+### END INIT INFO
+
+set -e
+
+# /etc/init.d/hadoop-jobtracker: start and stop the Apache Hadoop Job Tracker daemon
+
+test -x /usr/bin/hadoop || exit 0
+( /usr/bin/hadoop 2>&1 | grep -q hadoop ) 2>/dev/null || exit 0
+
+umask 022
+
+if test -f /etc/default/hadoop-env.sh; then
+    . /etc/default/hadoop-env.sh
+fi
+
+. /lib/lsb/init-functions
+
+# Are we running from init?
+run_by_init() {
+    ([ "$previous" ] && [ "$runlevel" ]) || [ "$runlevel" = S ]
+}
+
+check_for_no_start() {
+    # forget it if we're trying to start, and /etc/hadoop/hadoop-jobtracker_not_to_be_run exists
+    if [ -e /etc/hadoop/hadoop-jobtracker_not_to_be_run ]; then 
+	if [ "$1" = log_end_msg ]; then
+	    log_end_msg 0
+	fi
+	if ! run_by_init; then
+	    log_action_msg "Apache Hadoop Job Tracker server not in use (/etc/hadoop/hadoop-jobtracker_not_to_be_run)"
+	fi
+	exit 0
+    fi
+}
+
+check_privsep_dir() {
+    # Create the PrivSep empty dir if necessary
+    if [ ! -d ${HADOOP_PID_DIR} ]; then
+	mkdir -p ${HADOOP_PID_DIR}
+        chown root:hadoop ${HADOOP_PID_DIR}
+	chmod 0775 ${HADOOP_PID_DIR} 
+    fi
+}
+
+export PATH="${PATH:+$PATH:}/usr/sbin:/usr/bin"
+
+case "$1" in
+  start)
+	check_privsep_dir
+	check_for_no_start
+	log_daemon_msg "Starting Apache Hadoop Job Tracker server" "hadoop-jobtracker"
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid -c mapred -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start jobtracker; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+  stop)
+	log_daemon_msg "Stopping Apache Hadoop Job Tracker server" "hadoop-jobtracker"
+	if start-stop-daemon --stop --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+
+  restart)
+	check_privsep_dir
+	log_daemon_msg "Restarting Apache Hadoop Job Tracker server" "hadoop-jobtracker"
+	start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid
+	check_for_no_start log_end_msg
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid -c mapred -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start jobtracker; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+
+  try-restart)
+	check_privsep_dir
+	log_daemon_msg "Restarting Apache Hadoop Job Tracker server" "hadoop-jobtracker"
+	set +e
+	start-stop-daemon --stop --quiet --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid
+	RET="$?"
+	set -e
+	case $RET in
+	    0)
+		# old daemon stopped
+		check_for_no_start log_end_msg
+		if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid -c mapred -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start jobtracker; then
+		    log_end_msg 0
+		else
+		    log_end_msg 1
+		fi
+		;;
+	    1)
+		# daemon not running
+		log_progress_msg "(not running)"
+		log_end_msg 0
+		;;
+	    *)
+		# failed to stop
+		log_progress_msg "(failed to stop)"
+		log_end_msg 1
+		;;
+	esac
+	;;
+
+  status)
+	status_of_proc -p ${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid ${JAVA_HOME}/bin/java hadoop-jobtracker && exit 0 || exit $?
+	;;
+
+  *)
+	log_action_msg "Usage: /etc/init.d/hadoop-jobtracker {start|stop|restart|try-restart|status}"
+	exit 1
+esac
+
+exit 0

Added: hadoop/mapreduce/trunk/src/packages/deb/init.d/hadoop-tasktracker
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/packages/deb/init.d/hadoop-tasktracker?rev=1128394&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/packages/deb/init.d/hadoop-tasktracker (added)
+++ hadoop/mapreduce/trunk/src/packages/deb/init.d/hadoop-tasktracker Fri May 27 17:03:23 2011
@@ -0,0 +1,142 @@
+#! /bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+### BEGIN INIT INFO
+# Provides:		hadoop-tasktracker	
+# Required-Start:	$remote_fs $syslog
+# Required-Stop:	$remote_fs $syslog
+# Default-Start:	2 3 4 5
+# Default-Stop:		
+# Short-Description:	Apache Hadoop Task Tracker server
+### END INIT INFO
+
+set -e
+
+# /etc/init.d/hadoop-tasktracker: start and stop the Apache Hadoop Task Tracker daemon
+
+test -x /usr/bin/hadoop || exit 0
+( /usr/bin/hadoop 2>&1 | grep -q hadoop ) 2>/dev/null || exit 0
+
+umask 022
+
+if test -f /etc/default/hadoop-env.sh; then
+    . /etc/default/hadoop-env.sh
+fi
+
+. /lib/lsb/init-functions
+
+# Are we running from init?
+run_by_init() {
+    ([ "$previous" ] && [ "$runlevel" ]) || [ "$runlevel" = S ]
+}
+
+check_for_no_start() {
+    # forget it if we're trying to start, and /etc/hadoop/hadoop-tasktracker_not_to_be_run exists
+    if [ -e /etc/hadoop/hadoop-tasktracker_not_to_be_run ]; then 
+	if [ "$1" = log_end_msg ]; then
+	    log_end_msg 0
+	fi
+	if ! run_by_init; then
+	    log_action_msg "Apache Hadoop Task Tracker server not in use (/etc/hadoop/hadoop-tasktracker_not_to_be_run)"
+	fi
+	exit 0
+    fi
+}
+
+check_privsep_dir() {
+    # Create the PrivSep empty dir if necessary
+    if [ ! -d ${HADOOP_PID_DIR} ]; then
+	mkdir -p ${HADOOP_PID_DIR}
+        chown root:hadoop ${HADOOP_PID_DIR}
+	chmod 0775 ${HADOOP_PID_DIR} 
+    fi
+}
+
+export PATH="${PATH:+$PATH:}/usr/sbin:/usr/bin"
+
+case "$1" in
+  start)
+	check_privsep_dir
+	check_for_no_start
+	log_daemon_msg "Starting Apache Hadoop Task Tracker server" "hadoop-tasktracker"
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid -c mapred -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start tasktracker; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+  stop)
+	log_daemon_msg "Stopping Apache Hadoop Task Tracker server" "hadoop-tasktracker"
+	if start-stop-daemon --stop --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+
+  restart)
+	check_privsep_dir
+	log_daemon_msg "Restarting Apache Hadoop Task Tracker server" "hadoop-tasktracker"
+	start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid
+	check_for_no_start log_end_msg
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid -c mapred -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start tasktracker; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+
+  try-restart)
+	check_privsep_dir
+	log_daemon_msg "Restarting Apache Hadoop Task Tracker server" "hadoop-tasktracker"
+	set +e
+	start-stop-daemon --stop --quiet --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid
+	RET="$?"
+	set -e
+	case $RET in
+	    0)
+		# old daemon stopped
+		check_for_no_start log_end_msg
+		if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid -c mapred -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start tasktracker; then
+		    log_end_msg 0
+		else
+		    log_end_msg 1
+		fi
+		;;
+	    1)
+		# daemon not running
+		log_progress_msg "(not running)"
+		log_end_msg 0
+		;;
+	    *)
+		# failed to stop
+		log_progress_msg "(failed to stop)"
+		log_end_msg 1
+		;;
+	esac
+	;;
+
+  status)
+	status_of_proc -p ${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid ${JAVA_HOME}/bin/java hadoop-tasktracker && exit 0 || exit $?
+	;;
+
+  *)
+	log_action_msg "Usage: /etc/init.d/hadoop-tasktracker {start|stop|restart|try-restart|status}"
+	exit 1
+esac
+
+exit 0

Added: hadoop/mapreduce/trunk/src/packages/rpm/init.d/hadoop-jobtracker
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/packages/rpm/init.d/hadoop-jobtracker?rev=1128394&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/packages/rpm/init.d/hadoop-jobtracker (added)
+++ hadoop/mapreduce/trunk/src/packages/rpm/init.d/hadoop-jobtracker Fri May 27 17:03:23 2011
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# 
+# Starts a Hadoop jobtracker
+# 
+# chkconfig: 2345 90 10
+# description: Hadoop jobtracker
+
+source /etc/rc.d/init.d/functions
+source /etc/default/hadoop-env.sh
+
+RETVAL=0
+PIDFILE="${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid"
+desc="Hadoop jobtracker daemon"
+
+start() {
+  echo -n $"Starting $desc (hadoop-jobtracker): "
+  daemon --user mapred ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start jobtracker
+  RETVAL=$?
+  echo
+  [ $RETVAL -eq 0 ] && touch /var/lock/subsys/hadoop-jobtracker
+  return $RETVAL
+}
+
+stop() {
+  echo -n $"Stopping $desc (hadoop-jobtracker): "
+  daemon --user mapred ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop jobtracker
+  RETVAL=$?
+  sleep 5
+  echo
+  [ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/hadoop-jobtracker $PIDFILE
+}
+
+restart() {
+  stop
+  start
+}
+
+checkstatus(){
+  status -p $PIDFILE ${JAVA_HOME}/bin/java
+  RETVAL=$?
+}
+
+condrestart(){
+  [ -e /var/lock/subsys/hadoop-jobtracker ] && restart || :
+}
+
+case "$1" in
+  start)
+    start
+    ;;
+  stop)
+    stop
+    ;;
+  status)
+    checkstatus
+    ;;
+  restart)
+    restart
+    ;;
+  condrestart)
+    condrestart
+    ;;
+  *)
+    echo $"Usage: $0 {start|stop|status|restart|condrestart}"
+    exit 1
+esac
+
+exit $RETVAL

Added: hadoop/mapreduce/trunk/src/packages/rpm/init.d/hadoop-tasktracker
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/packages/rpm/init.d/hadoop-tasktracker?rev=1128394&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/packages/rpm/init.d/hadoop-tasktracker (added)
+++ hadoop/mapreduce/trunk/src/packages/rpm/init.d/hadoop-tasktracker Fri May 27 17:03:23 2011
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# 
+# Starts a Hadoop tasktracker
+# 
+# chkconfig: 2345 90 10
+# description: Hadoop tasktracker
+
+source /etc/rc.d/init.d/functions
+source /etc/default/hadoop-env.sh
+
+RETVAL=0
+PIDFILE="${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid"
+desc="Hadoop tasktracker daemon"
+
+start() {
+  echo -n $"Starting $desc (hadoop-tasktracker): "
+  daemon --user mapred ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start tasktracker
+  RETVAL=$?
+  echo
+  [ $RETVAL -eq 0 ] && touch /var/lock/subsys/hadoop-tasktracker
+  return $RETVAL
+}
+
+stop() {
+  echo -n $"Stopping $desc (hadoop-tasktracker): "
+  daemon --user mapred ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop tasktracker
+  RETVAL=$?
+  sleep 5
+  echo
+  [ $RETVAL -eq 0 ] && rm -f /var/lock/subsys/hadoop-tasktracker $PIDFILE
+}
+
+restart() {
+  stop
+  start
+}
+
+checkstatus(){
+  status -p $PIDFILE ${JAVA_HOME}/bin/java
+  RETVAL=$?
+}
+
+condrestart(){
+  [ -e /var/lock/subsys/hadoop-tasktracker ] && restart || :
+}
+
+case "$1" in
+  start)
+    start
+    ;;
+  stop)
+    stop
+    ;;
+  status)
+    checkstatus
+    ;;
+  restart)
+    restart
+    ;;
+  condrestart)
+    condrestart
+    ;;
+  *)
+    echo $"Usage: $0 {start|stop|status|restart|condrestart}"
+    exit 1
+esac
+
+exit $RETVAL

Added: hadoop/mapreduce/trunk/src/packages/rpm/spec/hadoop-mapred.spec
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/packages/rpm/spec/hadoop-mapred.spec?rev=1128394&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/packages/rpm/spec/hadoop-mapred.spec (added)
+++ hadoop/mapreduce/trunk/src/packages/rpm/spec/hadoop-mapred.spec Fri May 27 17:03:23 2011
@@ -0,0 +1,178 @@
+#   Licensed to the Apache Software Foundation (ASF) under one or more
+#   contributor license agreements.  See the NOTICE file distributed with
+#   this work for additional information regarding copyright ownership.
+#   The ASF licenses this file to You under the Apache License, Version 2.0
+#   (the "License"); you may not use this file except in compliance with
+#   the License.  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+#
+# RPM Spec file for Hadoop version @version@
+#
+
+%define name         hadoop-mapreduce
+%define version      @version@
+%define release      @package.release@
+
+# Installation Locations
+%define _prefix      @package.prefix@
+%define _bin_dir     %{_prefix}/bin
+%define _conf_dir    @package.conf.dir@
+%define _include_dir %{_prefix}/include
+%define _lib_dir     %{_prefix}/lib
+%define _lib64_dir   %{_prefix}/lib64
+%define _libexec_dir %{_prefix}/libexec
+%define _log_dir     @package.log.dir@
+%define _pid_dir     @package.pid.dir@
+%define _sbin_dir    %{_prefix}/sbin
+%define _share_dir   %{_prefix}/share
+%define _var_dir     @package.var.dir@
+
+# Build time settings
+%define _build_dir  @package.build.dir@
+%define _final_name @final.name@
+%define debug_package %{nil}
+
+# Disable brp-java-repack-jars for aspect J
+%define __os_install_post    \
+    /usr/lib/rpm/redhat/brp-compress \
+    %{!?__debug_package:/usr/lib/rpm/redhat/brp-strip %{__strip}} \
+    /usr/lib/rpm/redhat/brp-strip-static-archive %{__strip} \
+    /usr/lib/rpm/redhat/brp-strip-comment-note %{__strip} %{__objdump} \
+    /usr/lib/rpm/brp-python-bytecompile %{nil}
+
+# RPM searches perl files for dependancies and this breaks for non packaged perl lib
+# like thrift so disable this
+%define _use_internal_dependency_generator 0
+
+%ifarch i386
+%global hadoop_arch Linux-i386-32
+%endif
+%ifarch amd64 x86_64
+%global hadoop_arch Linux-amd64-64
+%endif
+%ifarch noarch
+%global hadoop_arch ""
+%endif
+
+Summary: The Apache Hadoop project develops open-source software for reliable, scalable, distributed computing
+License: Apache License, Version 2.0
+URL: http://hadoop.apache.org/core/
+Vendor: Apache Software Foundation
+Group: Development/Libraries
+Name: %{name}
+Version: %{version}
+Release: %{release} 
+Source0: %{_final_name}-bin.tar.gz
+Prefix: %{_prefix}
+Prefix: %{_conf_dir}
+Prefix: %{_log_dir}
+Prefix: %{_pid_dir}
+Buildroot: %{_build_dir}
+Requires: sh-utils, textutils, /usr/sbin/useradd, /usr/sbin/usermod, /sbin/chkconfig, /sbin/service, jdk >= 1.6, hadoop-common >= %{version}, hadoop-hdfs >= %{version}
+AutoReqProv: no
+Provides: hadoop-mapreduce
+
+%description
+The Apache Hadoop project develops open-source software for reliable, scalable, 
+distributed computing.  Hadoop includes these subprojects:
+
+MapReduce: A software framework for distributed processing of large data sets on compute clusters.
+
+%prep
+%setup -n %{_final_name}
+
+%build
+if [ -d ${RPM_BUILD_DIR}%{_prefix} ]; then
+  rm -rf ${RPM_BUILD_DIR}%{_prefix}
+fi
+
+if [ -d ${RPM_BUILD_DIR}%{_log_dir} ]; then
+  rm -rf ${RPM_BUILD_DIR}%{_log_dir}
+fi
+
+if [ -d ${RPM_BUILD_DIR}%{_conf_dir} ]; then
+  rm -rf ${RPM_BUILD_DIR}%{_conf_dir}
+fi
+
+if [ -d ${RPM_BUILD_DIR}%{_pid_dir} ]; then
+  rm -rf ${RPM_BUILD_DIR}%{_pid_dir}
+fi
+
+mkdir -p ${RPM_BUILD_DIR}%{_prefix}
+mkdir -p ${RPM_BUILD_DIR}%{_bin_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_lib_dir}
+%ifarch amd64 x86_64
+mkdir -p ${RPM_BUILD_DIR}%{_lib64_dir}
+%endif
+mkdir -p ${RPM_BUILD_DIR}%{_libexec_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_log_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_conf_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_pid_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_sbin_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_share_dir}
+mkdir -p ${RPM_BUILD_DIR}%{_var_dir}
+mkdir -p ${RPM_BUILD_DIR}/etc/init.d
+
+cp ${RPM_BUILD_DIR}/%{_final_name}/sbin/hadoop-jobtracker.redhat ${RPM_BUILD_DIR}/etc/init.d/hadoop-jobtracker
+cp ${RPM_BUILD_DIR}/%{_final_name}/sbin/hadoop-tasktracker.redhat ${RPM_BUILD_DIR}/etc/init.d/hadoop-tasktracker
+chmod 0755 ${RPM_BUILD_DIR}/etc/init.d/hadoop-*
+
+#########################
+#### INSTALL SECTION ####
+#########################
+%install
+mv ${RPM_BUILD_DIR}/%{_final_name}/bin/* ${RPM_BUILD_DIR}%{_bin_dir}
+rm -f ${RPM_BUILD_DIR}/%{_final_name}/etc/hadoop/configuration.xsl
+rm -f ${RPM_BUILD_DIR}/%{_final_name}/etc/hadoop/hadoop-metrics2.properties
+mv ${RPM_BUILD_DIR}/%{_final_name}/etc/hadoop/* ${RPM_BUILD_DIR}%{_conf_dir}
+mv ${RPM_BUILD_DIR}/%{_final_name}/include/* ${RPM_BUILD_DIR}%{_include_dir}
+mv ${RPM_BUILD_DIR}/%{_final_name}/lib/* ${RPM_BUILD_DIR}%{_lib_dir}
+mv ${RPM_BUILD_DIR}/%{_final_name}/libexec/* ${RPM_BUILD_DIR}%{_libexec_dir}
+mv ${RPM_BUILD_DIR}/%{_final_name}/sbin/* ${RPM_BUILD_DIR}%{_sbin_dir}
+mv ${RPM_BUILD_DIR}/%{_final_name}/share/* ${RPM_BUILD_DIR}%{_share_dir}
+rm -rf ${RPM_BUILD_DIR}/%{_final_name}/etc
+
+%pre
+getent group hadoop 2>/dev/null >/dev/null || /usr/sbin/groupadd -r hadoop
+/usr/sbin/useradd --comment "Hadoop MapReduce" --shell /bin/bash -M -r --groups hadoop --home %{_var_dir}/mapred mapred 2> /dev/null || :
+
+%post
+bash ${RPM_INSTALL_PREFIX0}/sbin/update-mapred-env.sh \
+       --prefix=${RPM_INSTALL_PREFIX0} \
+       --bin-dir=${RPM_INSTALL_PREFIX0}/bin \
+       --sbin-dir=${RPM_INSTALL_PREFIX0}/sbin \
+       --conf-dir=${RPM_INSTALL_PREFIX1} \
+       --log-dir=${RPM_INSTALL_PREFIX2} \
+       --pid-dir=${RPM_INSTALL_PREFIX3}
+
+%preun
+bash ${RPM_INSTALL_PREFIX0}/sbin/update-mapred-env.sh \
+       --prefix=${RPM_INSTALL_PREFIX0} \
+       --bin-dir=${RPM_INSTALL_PREFIX0}/bin \
+       --sbin-dir=${RPM_INSTALL_PREFIX0}/sbin \
+       --conf-dir=${RPM_INSTALL_PREFIX1} \
+       --log-dir=${RPM_INSTALL_PREFIX2} \
+       --pid-dir=${RPM_INSTALL_PREFIX3} \
+       --uninstall
+
+%files 
+%defattr(-,root,root)
+%attr(0755,root,hadoop) %{_log_dir}
+%attr(0775,root,hadoop) %{_pid_dir}
+%config(noreplace) %{_conf_dir}/mapred-site.xml
+%config(noreplace) %{_conf_dir}/capacity-scheduler.xml
+%config(noreplace) %{_conf_dir}/fair-scheduler.xml
+%config(noreplace) %{_conf_dir}/mapred-queues.xml
+%config(noreplace) %{_conf_dir}/taskcontroller.cfg
+%{_prefix}
+%attr(0775,root,root) /etc/init.d/hadoop-jobtracker
+%attr(0775,root,root) /etc/init.d/hadoop-tasktracker
+

Added: hadoop/mapreduce/trunk/src/packages/templates/conf/mapred-site.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/packages/templates/conf/mapred-site.xml?rev=1128394&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/packages/templates/conf/mapred-site.xml (added)
+++ hadoop/mapreduce/trunk/src/packages/templates/conf/mapred-site.xml Fri May 27 17:03:23 2011
@@ -0,0 +1,31 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+  <property>
+    <name>mapred.job.tracker</name>
+    <value>${HADOOP_JT_HOST}</value>
+  </property>
+
+  <property>
+    <name>mapred.system.dir</name>
+    <value>/user/mapred/system</value>
+  </property>
+
+  <property>
+    <name>mapred.local.dir</name>
+    <value>${HADOOP_MAPRED_DIR}</value>
+  </property>
+
+  <property>
+    <name>hadoop.tmp.dir</name>
+    <value>/tmp</value>
+  </property>
+
+  <property>
+    <name>mapred.jobtracker.taskScheduler</name>
+    <value>${HADOOP_TASK_SCHEDULER}</value>
+  </property>
+</configuration>

Added: hadoop/mapreduce/trunk/src/packages/update-mapred-env.sh
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/packages/update-mapred-env.sh?rev=1128394&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/packages/update-mapred-env.sh (added)
+++ hadoop/mapreduce/trunk/src/packages/update-mapred-env.sh Fri May 27 17:03:23 2011
@@ -0,0 +1,139 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script configures mapred-env.sh and symlinks directories for
+# relocating RPM locations.
+
+# Print the command-line help text to stdout and terminate the script
+# with a non-zero status.  Called on any parameter-parsing failure, so
+# the exit 1 here is what aborts the whole install/uninstall run.
+usage() {
+  echo "
+usage: $0 <parameters>
+  Required parameters:
+     --prefix=PREFIX             path to install into
+
+  Optional parameters:
+     --arch=i386                 OS Architecture
+     --bin-dir=PREFIX/bin        Executable directory
+     --conf-dir=/etc/hadoop      Configuration directory
+     --log-dir=/var/log/hadoop   Log directory
+     --pid-dir=/var/run          PID file location
+     --sbin-dir=PREFIX/sbin      System executable directory
+  "
+  exit 1
+}
+
+# Expand ${VAR} placeholders in the template file $1, substituting the
+# value of each correspondingly named shell variable, and append the
+# expanded text to the output file $2.
+template_generator() {
+  # IFS= and -r keep leading whitespace and backslashes in template lines
+  # intact; reading directly from "$1" replaces the useless `cat | while`.
+  while IFS= read -r line ; do
+    # BUG FIX: the =~ pattern must NOT be quoted — since bash 3.2 a quoted
+    # right-hand side is matched as a literal string, so the original
+    # '(\$\{...\})' never matched and no substitution ever happened.
+    while [[ "$line" =~ (\$\{[a-zA-Z_][a-zA-Z_0-9]*\}) ]] ; do
+      LHS=${BASH_REMATCH[1]}
+      RHS="$(eval echo "\"$LHS\"")"
+      line=${line//$LHS/$RHS}
+    done
+    # Quote the expansions so word splitting/globbing cannot corrupt output.
+    echo "$line" >> "$2"
+  done < "$1"
+}
+
+OPTS=$(getopt \
+  -n $0 \
+  -o '' \
+  -l 'arch:' \
+  -l 'prefix:' \
+  -l 'bin-dir:' \
+  -l 'conf-dir:' \
+  -l 'lib-dir:' \
+  -l 'log-dir:' \
+  -l 'pid-dir:' \
+  -l 'sbin-dir:' \
+  -l 'uninstall' \
+  -- "$@")
+
+if [ $? != 0 ] ; then
+    usage
+fi
+
+eval set -- "${OPTS}"
+while true ; do
+  case "$1" in
+    --arch)
+      ARCH=$2 ; shift 2
+      ;;
+    --prefix)
+      PREFIX=$2 ; shift 2
+      ;;
+    --bin-dir)
+      BIN_DIR=$2 ; shift 2
+      ;;
+    --log-dir)
+      LOG_DIR=$2 ; shift 2
+      ;;
+    --lib-dir)
+      LIB_DIR=$2 ; shift 2
+      ;;
+    --conf-dir)
+      CONF_DIR=$2 ; shift 2
+      ;;
+    --pid-dir)
+      PID_DIR=$2 ; shift 2
+      ;;
+    --sbin-dir)
+      SBIN_DIR=$2 ; shift 2
+      ;;
+    --uninstall)
+      UNINSTALL=1; shift
+      ;;
+    --)
+      shift ; break
+      ;;
+    *)
+      echo "Unknown option: $1"
+      usage
+      exit 1
+      ;;
+  esac
+done
+
+for var in PREFIX; do
+  if [ -z "$(eval "echo \$$var")" ]; then
+    echo Missing param: $var
+    usage
+  fi
+done
+
+ARCH=${ARCH:-i386}
+BIN_DIR=${BIN_DIR:-$PREFIX/bin}
+CONF_DIR=${CONF_DIR:-$PREFIX/etc/hadoop}
+LIB_DIR=${LIB_DIR:-$PREFIX/lib}
+LOG_DIR=${LOG_DIR:-$PREFIX/var/log}
+PID_DIR=${PID_DIR:-$PREFIX/var/run}
+SBIN_DIR=${SBIN_DIR:-$PREFIX/sbin}
+UNINSTALL=${UNINSTALL:-0}
+
+if [ "${ARCH}" != "i386" ]; then
+  LIB_DIR=${LIB_DIR}64
+fi
+
+if [ "${UNINSTALL}" -ne "1" ]; then
+  mkdir -p ${LOG_DIR}
+  chown mapred:hadoop ${LOG_DIR}
+  chmod 755 ${LOG_DIR}
+
+  if [ ! -d ${PID_DIR} ]; then
+    mkdir -p ${PID_DIR}
+    chown root:hadoop ${PID_DIR}
+    chmod 775 ${PID_DIR}
+  fi
+fi

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/fs/DFSCIOTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/fs/DFSCIOTest.java?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/fs/DFSCIOTest.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/fs/DFSCIOTest.java Fri May 27 17:03:23 2011
@@ -448,7 +448,7 @@ public class DFSCIOTest extends TestCase
         }
 
         //Copy the executables over to the remote filesystem
-        String hadoopHome = System.getenv("HADOOP_HOME");
+        String hadoopHome = System.getenv("HADOOP_PREFIX");
         fs.copyFromLocalFile(new Path(hadoopHome + "/libhdfs/libhdfs.so." + HDFS_LIB_VERSION),
                              HDFS_SHLIB);
         fs.copyFromLocalFile(new Path(hadoopHome + "/libhdfs/hdfs_read"), HDFS_READ);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ReliabilityTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ReliabilityTest.java?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ReliabilityTest.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ReliabilityTest.java Fri May 27 17:03:23 2011
@@ -179,7 +179,7 @@ public class ReliabilityTest extends Con
   
   private String normalizeCommandPath(String command) {
     final String hadoopHome;
-    if ((hadoopHome = System.getenv("HADOOP_HOME")) != null) {
+    if ((hadoopHome = System.getenv("HADOOP_PREFIX")) != null) {
       command = hadoopHome + "/" + command;
     }
     return command;

Modified: hadoop/mapreduce/trunk/src/test/system/conf/system-test-mapred.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/system/conf/system-test-mapred.xml?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/system/conf/system-test-mapred.xml (original)
+++ hadoop/mapreduce/trunk/src/test/system/conf/system-test-mapred.xml Fri May 27 17:03:23 2011
@@ -103,7 +103,7 @@
   <description>
     Local file system path on gate way to cluster-controller binary including the binary name.
     To build the binary the following commands need to be executed:
-     % ant run-as -Drun-as.hadoop.home.dir=(HADOOP_HOME of setup cluster)
+     % ant run-as -Drun-as.hadoop.home.dir=(HADOOP_PREFIX of setup cluster)
      % cp build-fi/system/c++-build/runAs test.system.hdrc.multi-user.binary.path
     Location of the cluster is important security precaution.
     The binary should be owned by root and test user group permission should be set such a



Mime
View raw message