hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From omal...@apache.org
Subject svn commit: r1128394 [1/2] - in /hadoop/mapreduce/trunk: ./ bin/ ivy/ src/benchmarks/gridmix/ src/benchmarks/gridmix/javasort/ src/benchmarks/gridmix/maxent/ src/benchmarks/gridmix/monsterQuery/ src/benchmarks/gridmix/pipesort/ src/benchmarks/gridmix/s...
Date Fri, 27 May 2011 17:03:26 GMT
Author: omalley
Date: Fri May 27 17:03:23 2011
New Revision: 1128394

URL: http://svn.apache.org/viewvc?rev=1128394&view=rev
Log:
MAPREDUCE-2521. Create RPM and Debian packages for MapReduce. Changes 
deployment layout to be consistent across the binary tgz, rpm, and deb.
(Eric Yang via omalley)

Added:
    hadoop/mapreduce/trunk/src/packages/
    hadoop/mapreduce/trunk/src/packages/deb/
    hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/
    hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/conffile
    hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/control
    hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/postinst
    hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/postrm
    hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/preinst
    hadoop/mapreduce/trunk/src/packages/deb/hadoop.control/prerm
    hadoop/mapreduce/trunk/src/packages/deb/init.d/
    hadoop/mapreduce/trunk/src/packages/deb/init.d/hadoop-jobtracker
    hadoop/mapreduce/trunk/src/packages/deb/init.d/hadoop-tasktracker
    hadoop/mapreduce/trunk/src/packages/rpm/
    hadoop/mapreduce/trunk/src/packages/rpm/init.d/
    hadoop/mapreduce/trunk/src/packages/rpm/init.d/hadoop-jobtracker
    hadoop/mapreduce/trunk/src/packages/rpm/init.d/hadoop-tasktracker
    hadoop/mapreduce/trunk/src/packages/rpm/spec/
    hadoop/mapreduce/trunk/src/packages/rpm/spec/hadoop-mapred.spec
    hadoop/mapreduce/trunk/src/packages/templates/
    hadoop/mapreduce/trunk/src/packages/templates/conf/
    hadoop/mapreduce/trunk/src/packages/templates/conf/mapred-site.xml
    hadoop/mapreduce/trunk/src/packages/update-mapred-env.sh
Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/bin/mapred
    hadoop/mapreduce/trunk/bin/mapred-config.sh
    hadoop/mapreduce/trunk/bin/start-mapred.sh
    hadoop/mapreduce/trunk/bin/stop-mapred.sh
    hadoop/mapreduce/trunk/build.xml
    hadoop/mapreduce/trunk/ivy.xml
    hadoop/mapreduce/trunk/ivy/libraries.properties
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/README
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/generateData.sh
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/gridmix-env
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.large
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.medium
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.small
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/maxent/maxent.large
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.large
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.medium
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.small
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.large
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.medium
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.small
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.large
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.medium
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.small
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.large
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.medium
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.small
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.large
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.medium
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.small
    hadoop/mapreduce/trunk/src/benchmarks/gridmix2/README.gridmix2
    hadoop/mapreduce/trunk/src/benchmarks/gridmix2/generateGridmix2data.sh
    hadoop/mapreduce/trunk/src/benchmarks/gridmix2/gridmix-env-2
    hadoop/mapreduce/trunk/src/benchmarks/gridmix2/rungridmix_2
    hadoop/mapreduce/trunk/src/c++/librecordio/test/Makefile
    hadoop/mapreduce/trunk/src/c++/pipes/debug/pipes-default-script
    hadoop/mapreduce/trunk/src/contrib/block_forensics/client/BlockForensics.java
    hadoop/mapreduce/trunk/src/contrib/data_join/src/examples/org/apache/hadoop/contrib/utils/join/README.txt
    hadoop/mapreduce/trunk/src/contrib/fairscheduler/README
    hadoop/mapreduce/trunk/src/contrib/mrunit/src/java/org/apache/hadoop/mrunit/package.html
    hadoop/mapreduce/trunk/src/contrib/mumak/bin/mumak.sh
    hadoop/mapreduce/trunk/src/contrib/raid/README
    hadoop/mapreduce/trunk/src/contrib/raid/bin/start-raidnode-remote.sh
    hadoop/mapreduce/trunk/src/contrib/raid/bin/stop-raidnode-remote.sh
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/DumpTypedBytes.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/HadoopStreaming.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/LoadTypedBytes.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
    hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java
    hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/vaidya.sh
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/fair_scheduler.xml
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/rumen.xml
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/streaming.xml
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/vaidya.xml
    hadoop/mapreduce/trunk/src/examples/python/compile
    hadoop/mapreduce/trunk/src/examples/python/pyAbacus/compile
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LinuxTaskController.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Submitter.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/fs/DFSCIOTest.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ReliabilityTest.java
    hadoop/mapreduce/trunk/src/test/system/conf/system-test-mapred.xml

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Fri May 27 17:03:23 2011
@@ -14,6 +14,10 @@ Trunk (unreleased changes)
     MAPREDUCE-461. Enable ServicePlugins for the JobTracker.
     (Fredrik Hedberg via tomwhite)
 
+    MAPREDUCE-2521. Create RPM and Debian packages for MapReduce. Changes 
+    deployment layout to be consistent across the binary tgz, rpm, and deb.
+    (Eric Yang via omalley)
+
   IMPROVEMENTS
 
     MAPREDUCE-2517. [Gridmix] Add system tests to Gridmix. 

Modified: hadoop/mapreduce/trunk/bin/mapred
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/bin/mapred?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/bin/mapred (original)
+++ hadoop/mapreduce/trunk/bin/mapred Fri May 27 17:03:23 2011
@@ -15,10 +15,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-bin=`dirname "$0"`
+bin=`which $0`
+bin=`dirname ${bin}`
 bin=`cd "$bin"; pwd`
 
-. $bin/mapred-config.sh
+. $bin/../libexec/mapred-config.sh
 
 function print_usage(){
   echo "Usage: mapred [--config confdir] COMMAND"
@@ -85,10 +86,10 @@ if [ -d "$HADOOP_MAPRED_HOME/build/tools
 fi
 
 # for releases, add core mapred jar & webapps to CLASSPATH
-if [ -d "$HADOOP_MAPRED_HOME/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME
+if [ -d "$HADOOP_PREFIX/share/hadoop/mapreduce/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop/mapreduce
 fi
-for f in $HADOOP_MAPRED_HOME/hadoop-mapred-*.jar; do
+for f in $HADOOP_MAPRED_HOME/share/hadoop-mapreduce/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 

Modified: hadoop/mapreduce/trunk/bin/mapred-config.sh
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/bin/mapred-config.sh?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/bin/mapred-config.sh (original)
+++ hadoop/mapreduce/trunk/bin/mapred-config.sh Fri May 27 17:03:23 2011
@@ -18,17 +18,14 @@
 # included in all the mapred scripts with source command
 # should not be executed directly
 
-bin=`dirname "$0"`
+bin=`which "$0"`
+bin=`dirname "${bin}"`
 bin=`cd "$bin"; pwd`
 
-export HADOOP_MAPRED_HOME="${HADOOP_MAPRED_HOME:-$bin/..}"
-
-if [ -d "${HADOOP_COMMON_HOME}" ]; then
-  . "$HADOOP_COMMON_HOME"/bin/hadoop-config.sh
-elif [ -d "${HADOOP_HOME}" ]; then
-  . "$HADOOP_HOME"/bin/hadoop-config.sh
-elif [ -e "${HADOOP_MAPRED_HOME}"/bin/hadoop-config.sh ]; then
-  . "$HADOOP_MAPRED_HOME"/bin/hadoop-config.sh
+if [ -d "${bin}" ]; then
+  . "$bin"/../libexec/hadoop-config.sh
+elif [ -e "${HADOOP_PREFIX}"/bin/hadoop-config.sh ]; then
+  . "$HADOOP_MAPRED_PREFIX"/bin/hadoop-config.sh
 else
   echo "Hadoop common not found."
   exit

Modified: hadoop/mapreduce/trunk/bin/start-mapred.sh
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/bin/start-mapred.sh?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/bin/start-mapred.sh (original)
+++ hadoop/mapreduce/trunk/bin/start-mapred.sh Fri May 27 17:03:23 2011
@@ -21,9 +21,9 @@
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
 
-. $bin/mapred-config.sh
+. $bin/../libexec/mapred-config.sh
 
 # start mapred daemons
 # start jobtracker first to minimize connection errors at startup
-"$HADOOP_COMMON_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR --script "$bin"/mapred start jobtracker
-"$HADOOP_COMMON_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/mapred start tasktracker
\ No newline at end of file
+"$HADOOP_PREFIX"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR --script "$bin"/mapred start jobtracker
+"$HADOOP_PREFIX"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/mapred start tasktracker

Modified: hadoop/mapreduce/trunk/bin/stop-mapred.sh
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/bin/stop-mapred.sh?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/bin/stop-mapred.sh (original)
+++ hadoop/mapreduce/trunk/bin/stop-mapred.sh Fri May 27 17:03:23 2011
@@ -21,7 +21,7 @@
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
 
-. $bin/mapred-config.sh
+. $bin/../libexec/mapred-config.sh
 
-"$HADOOP_COMMON_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR --script "$bin"/mapred stop jobtracker
-"$HADOOP_COMMON_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/mapred stop tasktracker
\ No newline at end of file
+"$HADOOP_PREFIX"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR --script "$bin"/mapred stop jobtracker
+"$HADOOP_PREFIX"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/mapred stop tasktracker

Modified: hadoop/mapreduce/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/build.xml?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/build.xml (original)
+++ hadoop/mapreduce/trunk/build.xml Fri May 27 17:03:23 2011
@@ -27,16 +27,19 @@
   <!-- to contribute (without having to type -D or edit this file -->
   <property file="${user.home}/build.properties" />
   <property file="${basedir}/build.properties" />
- 
+
+  <property name="module" value="mapreduce"/> 
   <property name="Name" value="Hadoop-Mapred"/>
-  <property name="name" value="hadoop-mapred"/>
+  <property name="name" value="hadoop-${module}"/>
   <!-- Need to change aop.xml project.version prop. synchronously -->
-  <property name="version" value="0.23.0-SNAPSHOT"/>
+  <property name="_version" value="0.23.0"/>
+  <property name="version" value="${_version}-SNAPSHOT"/>
   <property name="final.name" value="${name}-${version}"/>
   <property name="test.final.name" value="${name}-test-${version}"/>
   <property name="examples.final.name" value="${name}-examples-${version}"/>
   <property name="tools.final.name" value="${name}-tools-${version}"/>
   <property name="year" value="2009"/>
+  <property name="package.release" value="1"/>
 
   <property name="src.dir" value="${basedir}/src"/>  	
   <property name="mapred.src.dir" value="${src.dir}/java"/> 
@@ -251,11 +254,21 @@
     <equals arg1="${repo}" arg2="staging"/>
   </condition>
 
+  <!-- packaging properties -->
+  <property name="package.prefix" value="/usr"/>
+  <property name="package.conf.dir" value="/etc/hadoop"/>
+  <property name="package.log.dir" value="/var/log/hadoop/mapred"/>
+  <property name="package.pid.dir" value="/var/run/hadoop"/>
+  <property name="package.var.dir" value="/var/lib/hadoop"/>
+  <property name="package.share.dir" value="/share/hadoop/${module}"/>
+  <!-- Use fixed path to build rpm for avoiding rpmbuild conflict with dash path names -->
+  <property name="package.buildroot" value="/tmp/hadoop_mapred_package_build_${user.name}"/>
+  <property name="package.build.dir" value="/tmp/hadoop_mapred_package_build_${user.name}/BUILD"/>
+
   <!-- the normal classpath -->
   <path id="classpath">
     <pathelement location="${build.classes}"/>
     <pathelement location="${conf.dir}"/>
-    <path refid="ivy-common.classpath"/>
     <path refid="ivy-mapred.classpath"/>
   </path>
 
@@ -1281,17 +1294,28 @@
   <target name="bin-package" depends="compile, jar, examples, tools, jar-test, package-librecordio" 
 		description="assembles artifacts for binary target">
     <mkdir dir="${dist.dir}"/>
+    <mkdir dir="${dist.dir}/include"/>
     <mkdir dir="${dist.dir}/lib"/>
-    <mkdir dir="${dist.dir}/contrib"/>
+    <mkdir dir="${dist.dir}/${package.share.dir}/contrib"/>
+    <mkdir dir="${dist.dir}/${package.share.dir}/lib"/>
+    <mkdir dir="${dist.dir}/${package.share.dir}/templates"/>
     <mkdir dir="${dist.dir}/bin"/>
+    <mkdir dir="${dist.dir}/sbin"/>
 
-    <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
+    <!-- enable this if there is mapred specific dependencies
+    <copy todir="${dist.dir}/${package.share.dir}/lib" includeEmptyDirs="false" flatten="true">
       <fileset dir="${mapred.ivy.lib.dir}"/>
+    </copy> -->
+
+    <copy todir="${dist.dir}/include" includeEmptyDirs="false">
+      <fileset dir="${build.dir}/c++/${build.platform}/include">
+        <include name="**"/>
+      </fileset>
     </copy>
 
     <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
-      <fileset dir="lib">
-        <exclude name="**/native/**"/>
+      <fileset dir="${build.dir}/c++/${build.platform}/lib">
+        <include name="**"/>
       </fileset>
     </copy>
 
@@ -1299,47 +1323,66 @@
       <!--Pass down the version in case its needed again and the target
       distribution directory so contribs know where to install to.-->
       <property name="version" value="${version}"/>
-      <property name="dist.dir" value="${dist.dir}"/>
+      <property name="dist.dir" value="${dist.dir}/${package.share.dir}"/>
       <fileset file="${contrib.dir}/build.xml"/>
     </subant>  	
 
-    <copy todir="${dist.dir}/webapps">
-      <fileset dir="${build.webapps}"/>
-    </copy>
-
-    <copy todir="${dist.dir}"> 
+    <copy todir="${dist.dir}/${package.share.dir}"> 
       <fileset file="${build.dir}/${final.name}*.jar"/>
- <!-- <fileset file="${build.dir}/${test.final.name}.jar"/>
+      <fileset file="${build.dir}/${test.final.name}.jar"/>
       <fileset file="${build.dir}/${examples.final.name}.jar"/>
-      <fileset file="${build.dir}/${tools.final.name}.jar"/> -->
+      <fileset file="${build.dir}/${tools.final.name}.jar"/>
     </copy>
-    
+
     <copy todir="${dist.dir}/bin">
-      <fileset dir="bin"/>
+      <fileset dir="bin">
+        <include name="mapred"/>
+      </fileset>
     </copy>
+
+    <copy todir="${dist.dir}/libexec">
+      <fileset dir="bin">
+        <include name="mapred-config.sh"/>
+      </fileset>
+    </copy>
+
+    <copy todir="${dist.dir}/sbin">
+      <fileset dir="bin">
+        <include name="start-*.sh"/>
+        <include name="stop-*.sh"/>
+      </fileset>
+    </copy>
+
+    <copy file="${basedir}/src/packages/update-mapred-env.sh" tofile="${dist.dir}/sbin/update-mapred-env.sh"/>
+    <copy file="${basedir}/src/packages/rpm/init.d/hadoop-jobtracker" tofile="${dist.dir}/sbin/hadoop-jobtracker.redhat"/>
+    <copy file="${basedir}/src/packages/rpm/init.d/hadoop-tasktracker" tofile="${dist.dir}/sbin/hadoop-tasktracker.redhat"/>
+    <copy file="${basedir}/src/packages/deb/init.d/hadoop-jobtracker" tofile="${dist.dir}/sbin/hadoop-jobtracker.debian"/>
+    <copy file="${basedir}/src/packages/deb/init.d/hadoop-tasktracker" tofile="${dist.dir}/sbin/hadoop-tasktracker.debian"/>
+    
+    <copy file="${basedir}/src/packages/update-mapred-env.sh" tofile="${dist.dir}/sbin/update-mapred-env.sh"/>
       	
-    <copy todir="${dist.dir}/conf">
+    <copy todir="${dist.dir}/etc/hadoop">
       <fileset dir="${conf.dir}" excludes="**/*.template"/>
     </copy>
 
-    <copy file="ivy.xml" tofile="${dist.dir}/ivy.xml"/>
+    <copy todir="${dist.dir}/${package.share.dir}/templates">
+      <fileset dir="${basedir}/src/packages/templates/conf" includes="*"/>
+    </copy>
 
-    <copy todir="${dist.dir}/ivy">
-      <fileset dir="ivy"/>
+    <copy todir="${dist.dir}/${package.share.dir}/webapps">
+      <fileset dir="${build.webapps}"/>
     </copy>
 
-    <copy todir="${dist.dir}">
+    <copy todir="${dist.dir}/share/doc/hadoop/${module}">
       <fileset dir=".">
         <include name="*.txt" />
       </fileset>
     </copy>
-  	
-    <copy todir="${dist.dir}/c++" includeEmptyDirs="false">
-      <fileset dir="${build.dir}/c++"/>
-    </copy>
-
-    <copy todir="${dist.dir}/" file="build.xml"/>
 
+    <chmod perm="ugo+x" type="file" parallel="false">
+        <fileset dir="${dist.dir}/bin"/>
+        <fileset dir="${dist.dir}/sbin"/>
+    </chmod>  	
   </target>
 
   <target name="binary-system" depends="bin-package, jar-system, jar-test-system"
@@ -1397,6 +1440,114 @@
     </macro_tar>
   </target>
 
+  <target name="rpm" depends="binary" description="Make rpm package">
+    <mkdir dir="${package.buildroot}/BUILD" />
+    <mkdir dir="${package.buildroot}/RPMS" />
+    <mkdir dir="${package.buildroot}/SRPMS" />
+    <mkdir dir="${package.buildroot}/SOURCES" />
+    <mkdir dir="${package.buildroot}/SPECS" />
+    <copy todir="${package.buildroot}/SOURCES">
+      <fileset dir="${build.dir}">
+        <include name="${final.name}-bin.tar.gz" />
+      </fileset>
+    </copy>
+    <copy file="${src.dir}/packages/rpm/spec/hadoop-mapred.spec" todir="${package.buildroot}/SPECS">
+      <filterchain>
+        <replacetokens>
+          <token key="final.name" value="${final.name}" />
+          <token key="version" value="${_version}" />
+          <token key="package.release" value="${package.release}" />
+          <token key="package.build.dir" value="${package.build.dir}" />
+          <token key="package.prefix" value="${package.prefix}" />
+          <token key="package.conf.dir" value="${package.conf.dir}" />
+          <token key="package.log.dir" value="${package.log.dir}" />
+          <token key="package.pid.dir" value="${package.pid.dir}" />
+          <token key="package.var.dir" value="${package.var.dir}" />
+        </replacetokens>
+      </filterchain>
+    </copy>
+    <rpm specFile="hadoop-mapred.spec" command="-bb --target ${os.arch}" topDir="${package.buildroot}" cleanBuildDir="true" failOnError="true"/>
+    <copy todir="${build.dir}/" flatten="true">
+      <fileset dir="${package.buildroot}/RPMS">
+        <include name="**/*.rpm" />
+      </fileset>
+    </copy>
+    <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
+  </target>
+
+  <target name="deb" depends="ivy-retrieve-package, binary" description="Make deb package">
+    <taskdef name="deb"
+           classname="org.vafer.jdeb.ant.DebAntTask">
+      <classpath refid="ivy-package.classpath" />
+    </taskdef>
+
+    <mkdir dir="${package.build.dir}/hadoop.control" />
+    <mkdir dir="${package.buildroot}/${package.prefix}" />
+    <copy todir="${package.buildroot}/${package.prefix}">
+      <fileset dir="${build.dir}/${final.name}">
+        <include name="**" />
+      </fileset>
+    </copy>
+    <copy todir="${package.build.dir}/hadoop.control">
+      <fileset dir="${src.dir}/packages/deb/hadoop.control">
+        <exclude name="control" />
+      </fileset>
+    </copy>
+    <copy file="${src.dir}/packages/deb/hadoop.control/control" todir="${package.build.dir}/hadoop.control">
+      <filterchain>
+        <replacetokens>
+          <token key="final.name" value="${final.name}" />
+          <token key="version" value="${_version}" />
+          <token key="package.release" value="${package.release}" />
+          <token key="package.build.dir" value="${package.build.dir}" />
+          <token key="package.prefix" value="${package.prefix}" />
+          <token key="package.conf.dir" value="${package.conf.dir}" />
+          <token key="package.log.dir" value="${package.log.dir}" />
+          <token key="package.pid.dir" value="${package.pid.dir}" />
+        </replacetokens>
+      </filterchain>
+    </copy>
+    <deb destfile="${package.buildroot}/${name}_${_version}-${package.release}_${os.arch}.deb" control="${package.build.dir}/hadoop.control">
+      <tarfileset dir="${build.dir}/${final.name}" filemode="644" prefix="${package.prefix}">
+        <exclude name="bin/*" />
+        <exclude name="${package.share.dir}/contrib/*/bin/*" />
+        <exclude name="etc" />
+        <exclude name="etc/**" />
+        <exclude name="libexec/*" />
+        <exclude name="sbin/*" />
+        <include name="**" />
+      </tarfileset>
+      <tarfileset dir="${build.dir}/${final.name}" filemode="755" prefix="${package.prefix}">
+        <include name="bin/*" />
+        <include name="sbin/*" />
+        <exclude name="sbin/*.redhat" />
+        <exclude name="sbin/*.debian" />
+        <include name="libexec/*" />
+        <include name="${package.share.dir}/contrib/*/bin/*" />
+      </tarfileset>
+      <tarfileset dir="${src.dir}/packages" filemode="755" prefix="${package.prefix}/sbin">
+        <include name="*.sh" />
+      </tarfileset>
+      <tarfileset dir="${build.dir}/${final.name}/etc/hadoop" filemode="644" prefix="${package.conf.dir}">
+        <exclude name="configuration.xsl" />
+        <exclude name="hadoop-metrics2.properties" />
+        <exclude name="core-site.xml" />
+        <exclude name="hdfs-site.xml" />
+        <exclude name="mapred-site.xml" />
+        <include name="**" />
+      </tarfileset>
+      <tarfileset dir="${basedir}/src/packages/deb/init.d" filemode="755" prefix="/etc/init.d">
+        <include name="**" />
+      </tarfileset>
+    </deb>
+    <copy todir="${build.dir}/" flatten="true">
+      <fileset dir="${package.buildroot}">
+        <include name="**/${name}*.deb" />
+      </fileset>
+    </copy>
+    <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
+  </target>
+
   <target name="ant-task-download" description="To download mvn-ant-task">
     <get src="${ant_task_repo_url}" dest="${ant_task.jar}" usetimestamp="true"/>
   </target>
@@ -1684,8 +1835,8 @@
   </target>
 
   <target name="package-librecordio" depends="compile-librecordio" if="librecordio">
-    <mkdir dir="${dist.dir}/librecordio"/> 
-    <copy todir="${dist.dir}/librecordio">
+    <mkdir dir="${dist.dir}/lib"/> 
+    <copy todir="${dist.dir}/lib">
        <fileset dir="${build.librecordio}" casesensitive="yes" followsymlinks="false">
           <exclude name="**/tests/**"/>
           <exclude name="*.so"/> 
@@ -1693,7 +1844,7 @@
        </fileset>
     </copy>
     <chmod perm="ugo+x" type="file">
-       <fileset dir="${dist.dir}/librecordio"/>
+       <fileset dir="${dist.dir}/lib"/>
     </chmod>
   </target>
  
@@ -2071,7 +2222,7 @@
 
 
   <property name="ivyresolvelog" value="download-only"/>
-  <property name="ivyretrievelog" value="quite"/>
+  <property name="ivyretrievelog" value="quiet"/>
 
   <target name="ivy-init" depends="ivy-init-antlib" >
 
@@ -2106,6 +2257,11 @@
       log="${ivyresolvelog}"/>
   </target>
 
+  <target name="ivy-resolve-package" depends="ivy-init">
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="package"
+        log="${ivyresolvelog}"/>
+  </target>
+
   <target name="ivy-resolve-mapred" depends="ivy-init">
     <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="mapred"
       log="${ivyresolvelog}"/>
@@ -2173,6 +2329,14 @@
     <ivy:cachepath pathid="ivy-common.classpath" conf="common"/>
   </target>
 
+  <target name="ivy-retrieve-package" depends="ivy-resolve-package"
+    description="Retrieve Ivy-managed artifacts for the package configurations">
+    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+                log="${ivyretrievelog}"/>
+    <ivy:cachepath pathid="ivy-package.classpath" conf="package"/>
+  </target>
+
   <target name="ivy-retrieve-mapred" depends="ivy-resolve-mapred"
     description="Retrieve Ivy-managed artifacts for the mapred configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"

Modified: hadoop/mapreduce/trunk/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/ivy.xml?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/ivy.xml (original)
+++ hadoop/mapreduce/trunk/ivy.xml Fri May 27 17:03:23 2011
@@ -40,6 +40,7 @@
     <conf name="mapred" visibility="private" extends="compile,runtime" description="Mapred dependent artifacts"/>
    <conf name="javadoc" visibility="private" description="artifacts required while performing doc generation" extends="common"/>
     <conf name="test" extends="master" visibility="private" description="the classpath needed to run tests"/>
+    <conf name="package" extends="master" description="the classpath needed for packaging"/>
     <conf name="system" extends="test" visibility="private" description="the classpath needed to run system tests"/>
 
     <conf name="test-hdfswithmr" extends="test" visibility="private" description="the classpath needed to run tests"/>
@@ -57,22 +58,22 @@
   </publications>
  <dependencies>
    <dependency org="org.apache.hadoop" name="hadoop-common" 
-               rev="${hadoop-common.version}" conf="common->default"/> 
+               rev="${hadoop-common.version}" conf="compile->default"/> 
    <dependency org="org.apache.hadoop" name="hadoop-common-test" 
-               rev="${hadoop-common.version}" conf="common->default"/> 
+               rev="${hadoop-common.version}" conf="compile->default"/> 
    <dependency org="org.apache.hadoop" name="hadoop-hdfs" 
-               rev="${hadoop-hdfs.version}" conf="common->default"/> 
+               rev="${hadoop-hdfs.version}" conf="compile->default"/> 
    <dependency org="org.apache.hadoop" name="hadoop-common-instrumented"
                rev="${hadoop-common.version}" conf="system->default"/>
    <dependency org="org.apache.hadoop" name="hadoop-hdfs-instrumented"
                rev="${hadoop-common.version}" conf="system->default"/>
    <dependency org="commons-logging" name="commons-logging" 
-               rev="${commons-logging.version}" conf="common->master"/>
+               rev="${commons-logging.version}" conf="compile->master"/>
    <dependency org="log4j" name="log4j" rev="${log4j.version}" 
-               conf="common->master"/>
+               conf="compile->master"/>
 
    <dependency org="org.slf4j" name="slf4j-api" rev="${slf4j-api.version}" 
-               conf="common->master"/>
+               conf="compile->master"/>
    <dependency org="org.slf4j" name="slf4j-log4j12" 
                rev="${slf4j-log4j12.version}" conf="mapred->master"/>
    <dependency org="org.apache.hadoop" name="hadoop-common-test" 
@@ -99,20 +100,21 @@
    <dependency org="org.apache.lucene" name="lucene-core" 
                rev="${lucene-core.version}" conf="javadoc->default"/>
    <dependency org="org.apache.hadoop" name="avro" rev="${avro.version}" 
-               conf="common->default">
+               conf="compile->default">
       <exclude module="ant"/>
       <exclude module="jetty"/>
       <exclude module="slf4j-simple"/>
     </dependency>
    <dependency org="org.mockito" name="mockito-all" rev="${mockito-all.version}" 
                conf="test->default"/>
+   <dependency org="org.vafer" name="jdeb" rev="${jdeb.version}" conf="package->master"/>
    <dependency org="org.mortbay.jetty" name="jetty-servlet-tester" rev="${jetty.version}"
                conf="test->default"/>
    <!-- dependency addition for the fault injection -->
    <dependency org="org.aspectj" name="aspectjrt" rev="${aspectj.version}"
-               conf="common->default"/>
+               conf="compile->default"/>
    <dependency org="org.aspectj" name="aspectjtools" rev="${aspectj.version}"
-               conf="common->default"/>
+               conf="compile->default"/>
 
  </dependencies>
   

Modified: hadoop/mapreduce/trunk/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/ivy/libraries.properties?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/ivy/libraries.properties (original)
+++ hadoop/mapreduce/trunk/ivy/libraries.properties Fri May 27 17:03:23 2011
@@ -49,6 +49,7 @@ hsqldb.version=1.8.0.10
 ivy.version=2.1.0
 
 jasper.version=5.5.12
+jdeb.version=0.8
 jsp.version=2.1
 jsp-api.version=5.5.12
 jets3t.version=0.7.1

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/README
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/README?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/README (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/README Fri May 27 17:03:23 2011
@@ -89,20 +89,20 @@ spills.
   > ant -Dcompile.c++=yes examples
 2) Copy the pipe sort example to a location in the default filesystem
    (usually HDFS, default /gridmix/programs)
-  > $HADOOP_HOME/hadoop dfs -mkdir $GRID_MIX_PROG
-  > $HADOOP_HOME/hadoop dfs -put build/c++-examples/$PLATFORM_STR/bin/pipes-sort $GRID_MIX_PROG
+  > $HADOOP_PREFIX/hadoop dfs -mkdir $GRID_MIX_PROG
+  > $HADOOP_PREFIX/hadoop dfs -put build/c++-examples/$PLATFORM_STR/bin/pipes-sort $GRID_MIX_PROG
 
 1.1 Configure
 
 One must modify hadoop-env to supply the following information:
 
-HADOOP_HOME     The hadoop install location
+HADOOP_PREFIX     The hadoop install location
 GRID_MIX_HOME   The location of these scripts
 APP_JAR         The location of the hadoop example
 GRID_MIX_DATA   The location of the datasets for these benchmarks
 GRID_MIX_PROG   The location of the pipe-sort example
 
-Reasonable defaults are provided for all but HADOOP_HOME. The datasets used
+Reasonable defaults are provided for all but HADOOP_PREFIX. The datasets used
 by each of the respective benchmarks are recorded in the Input::hadoop-env
 comment in section 0 and their location may be changed in hadoop-env. Note
 that each job expects particular input data and the parameters given to it

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/generateData.sh
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/generateData.sh?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/generateData.sh (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/generateData.sh Fri May 27 17:03:23 2011
@@ -38,7 +38,7 @@ if [ ! -z ${USE_REAL_DATASET} ] ; then
   INDIRECT_DATA_BYTES=58720256000 
 fi
 
-${HADOOP_HOME}/bin/hadoop jar \
+${HADOOP_PREFIX}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
   -D mapreduce.randomtextwriter.totalbytes=${COMPRESSED_DATA_BYTES} \
   -D mapreduce.randomtextwriter.bytespermap=$((${COMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
@@ -51,7 +51,7 @@ ${HADOOP_HOME}/bin/hadoop jar \
   -outFormat org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat \
   ${VARCOMPSEQ} &
 
-${HADOOP_HOME}/bin/hadoop jar \
+${HADOOP_PREFIX}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
   -D mapreduce.randomtextwriter.totalbytes=${COMPRESSED_DATA_BYTES} \
   -D mapreduce.randomtextwriter.bytespermap=$((${COMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
@@ -64,7 +64,7 @@ ${HADOOP_HOME}/bin/hadoop jar \
   -outFormat org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat \
   ${FIXCOMPSEQ} &
 
-${HADOOP_HOME}/bin/hadoop jar \
+${HADOOP_PREFIX}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
   -D mapreduce.randomtextwriter.totalbytes=${UNCOMPRESSED_DATA_BYTES} \
   -D mapreduce.randomtextwriter.bytespermap=$((${UNCOMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
@@ -76,7 +76,7 @@ ${HADOOP_HOME}/bin/hadoop jar \
   -outFormat org.apache.hadoop.mapreduce.lib.output.TextOutputFormat \
   ${VARINFLTEXT} &
 
-${HADOOP_HOME}/bin/hadoop jar \
+${HADOOP_PREFIX}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
   -D mapreduce.randomtextwriter.totalbytes=${INDIRECT_DATA_BYTES} \
   -D mapreduce.randomtextwriter.bytespermap=$((${INDIRECT_DATA_BYTES} / ${INDIRECT_DATA_FILES})) \

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/gridmix-env
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/gridmix-env?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/gridmix-env (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/gridmix-env Fri May 27 17:03:23 2011
@@ -15,8 +15,8 @@
 ## Environment configuration
 # Hadoop installation
 # set var only if it has not already been set externally
-if [ -z "${HADOOP_HOME}" ] ; then
-  export HADOOP_HOME=
+if [ -z "${HADOOP_PREFIX}" ] ; then
+  export HADOOP_PREFIX=
 fi
 # Base directory for gridmix install
 # set var only if it has not already been set externally
@@ -26,17 +26,17 @@ fi
 # Hadoop example jar
 # set var only if it has not already been set externally
 if [ -z "${EXAMPLE_JAR}" ] ; then
-  export EXAMPLE_JAR="${HADOOP_HOME}/hadoop-*examples.jar"
+  export EXAMPLE_JAR="${HADOOP_PREFIX}/hadoop-*examples.jar"
 fi
 # Hadoop test jar
 # set var only if it has not already been set externally
 if [ -z "${APP_JAR}" ] ; then
-  export APP_JAR="${HADOOP_HOME}/hadoop-*test.jar"
+  export APP_JAR="${HADOOP_PREFIX}/hadoop-*test.jar"
 fi
 # Hadoop streaming jar
 # set var only if it has not already been set externally
 if [ -z "${STREAM_JAR}" ] ; then
-  export STREAM_JAR="${HADOOP_HOME}/contrib/streaming/hadoop-*streaming.jar"
+  export STREAM_JAR="${HADOOP_PREFIX}/contrib/streaming/hadoop-*streaming.jar"
 fi
 # Location on default filesystem for writing gridmix data (usually HDFS)
 # Default: /gridmix/data

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.large
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.large?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.large (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.large Fri May 27 17:03:23 2011
@@ -19,7 +19,7 @@ INDIR=${VARINFLTEXT}
 
 Date=`date +%F-%H-%M-%S-%N`
 OUTDIR=perf-out/sort-out-dir-large_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar ${EXAMPLE_JAR} sort -m 1 -r $NUM_OF_REDUCERS_FOR_LARGE_JOB -inFormat org.apache.hadoop.mapred.KeyValueTextInputFormat -outFormat org.apache.hadoop.mapred.TextOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text $INDIR $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop jar ${EXAMPLE_JAR} sort -m 1 -r $NUM_OF_REDUCERS_FOR_LARGE_JOB -inFormat org.apache.hadoop.mapred.KeyValueTextInputFormat -outFormat org.apache.hadoop.mapred.TextOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text $INDIR $OUTDIR
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.medium
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.medium?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.medium (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.medium Fri May 27 17:03:23 2011
@@ -19,7 +19,7 @@ INDIR="${VARINFLTEXT}/{part-000*0,part-0
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/sort-out-dir-medium_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar ${EXAMPLE_JAR} sort -m 1 -r $NUM_OF_REDUCERS_FOR_MEDIUM_JOB -inFormat org.apache.hadoop.mapred.KeyValueTextInputFormat -outFormat org.apache.hadoop.mapred.TextOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text $INDIR $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop jar ${EXAMPLE_JAR} sort -m 1 -r $NUM_OF_REDUCERS_FOR_MEDIUM_JOB -inFormat org.apache.hadoop.mapred.KeyValueTextInputFormat -outFormat org.apache.hadoop.mapred.TextOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text $INDIR $OUTDIR
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.small
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.small?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.small (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/javasort/text-sort.small Fri May 27 17:03:23 2011
@@ -19,7 +19,7 @@ INDIR="${VARINFLTEXT}/{part-00000,part-0
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/sort-out-dir-small_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar ${EXAMPLE_JAR} sort -m 1 -r $NUM_OF_REDUCERS_FOR_SMALL_JOB -inFormat org.apache.hadoop.mapred.KeyValueTextInputFormat -outFormat org.apache.hadoop.mapred.TextOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text $INDIR $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop jar ${EXAMPLE_JAR} sort -m 1 -r $NUM_OF_REDUCERS_FOR_SMALL_JOB -inFormat org.apache.hadoop.mapred.KeyValueTextInputFormat -outFormat org.apache.hadoop.mapred.TextOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text $INDIR $OUTDIR
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/maxent/maxent.large
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/maxent/maxent.large?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/maxent/maxent.large (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/maxent/maxent.large Fri May 27 17:03:23 2011
@@ -20,18 +20,18 @@ INDIR=${FIXCOMPTEXT}
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/maxent-out-dir-large_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 50 -keepred 100 -inFormatIndirect org.apache.hadoop.mapred.TextInputFormat -outFormat org.apache.hadoop.mapred.TextOutputFormat -outKey org.apache.hadoop.io.LongWritable -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR.1 -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 50 -keepred 100 -inFormatIndirect org.apache.hadoop.mapred.TextInputFormat -outFormat org.apache.hadoop.mapred.TextOutputFormat -outKey org.apache.hadoop.io.LongWritable -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR.1 -r $NUM_OF_REDUCERS
 
 ITER=7
 for ((i=1; i<$ITER; ++i))
 do
-  ${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 50 -keepred 100 -inFormatIndirect org.apache.hadoop.mapred.TextInputFormat -outFormat org.apache.hadoop.mapred.TextOutputFormat -outKey org.apache.hadoop.io.LongWritable -outValue org.apache.hadoop.io.Text -indir $INDIR -indir $OUTDIR.$i -outdir $OUTDIR.$(($i+1)) -r $NUM_OF_REDUCERS
+  ${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 50 -keepred 100 -inFormatIndirect org.apache.hadoop.mapred.TextInputFormat -outFormat org.apache.hadoop.mapred.TextOutputFormat -outKey org.apache.hadoop.io.LongWritable -outValue org.apache.hadoop.io.Text -indir $INDIR -indir $OUTDIR.$i -outdir $OUTDIR.$(($i+1)) -r $NUM_OF_REDUCERS
   if [ $? -ne "0" ]
     then exit $?
   fi
-  ${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR.$i
+  ${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR.$i
 done
 
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR.$ITER
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR.$ITER

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.large
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.large?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.large (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.large Fri May 27 17:03:23 2011
@@ -20,19 +20,19 @@ INDIR=${FIXCOMPSEQ}
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/mq-out-dir-large_$Date.1
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 10 -keepred 40 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 10 -keepred 40 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
 
 INDIR=$OUTDIR
 OUTDIR=perf-out/mq-out-dir-large_$Date.2
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 100 -keepred 77 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 100 -keepred 77 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
 
 INDIR=$OUTDIR
 OUTDIR=perf-out/mq-out-dir-large_$Date.3
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 116 -keepred 91 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 116 -keepred 91 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.medium
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.medium?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.medium (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.medium Fri May 27 17:03:23 2011
@@ -20,19 +20,19 @@ INDIR="${FIXCOMPSEQ}/{part-000*0,part-00
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/mq-out-dir-medium_$Date.1
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 10 -keepred 40 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 10 -keepred 40 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
 
 INDIR=$OUTDIR
 OUTDIR=perf-out/mq-out-dir-medium_$Date.2
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 100 -keepred 77 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 100 -keepred 77 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
 
 INDIR=$OUTDIR
 OUTDIR=perf-out/mq-out-dir-medium_$Date.3
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 116 -keepred 91 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 116 -keepred 91 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.small
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.small?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.small (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/monsterQuery/monster_query.small Fri May 27 17:03:23 2011
@@ -20,19 +20,19 @@ INDIR="${FIXCOMPSEQ}/{part-00000,part-00
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/mq-out-dir-small_$Date.1
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 10 -keepred 40 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 10 -keepred 40 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
 
 INDIR=$OUTDIR
 OUTDIR=perf-out/mq-out-dir-small_$Date.2
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 100 -keepred 77 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 100 -keepred 77 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
 
 INDIR=$OUTDIR
 OUTDIR=perf-out/mq-out-dir-small_$Date.3
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 116 -keepred 91 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 116 -keepred 91 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.large
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.large?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.large (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.large Fri May 27 17:03:23 2011
@@ -20,8 +20,8 @@ INDIR=${VARINFLTEXT}
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/pipe-out-dir-large_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
 
-${HADOOP_HOME}/bin/hadoop pipes -input $INDIR -output $OUTDIR -inputformat org.apache.hadoop.mapred.KeyValueTextInputFormat -program ${GRID_MIX_PROG}/pipes-sort -reduces $NUM_OF_REDUCERS -jobconf mapreduce.job.output.key.class=org.apache.hadoop.io.Text,mapreduce.job.output.value.class=org.apache.hadoop.io.Text -writer org.apache.hadoop.mapred.TextOutputFormat
+${HADOOP_PREFIX}/bin/hadoop pipes -input $INDIR -output $OUTDIR -inputformat org.apache.hadoop.mapred.KeyValueTextInputFormat -program ${GRID_MIX_PROG}/pipes-sort -reduces $NUM_OF_REDUCERS -jobconf mapreduce.job.output.key.class=org.apache.hadoop.io.Text,mapreduce.job.output.value.class=org.apache.hadoop.io.Text -writer org.apache.hadoop.mapred.TextOutputFormat
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.medium
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.medium?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.medium (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.medium Fri May 27 17:03:23 2011
@@ -20,8 +20,8 @@ INDIR="${VARINFLTEXT}/{part-000*0,part-0
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/pipe-out-dir-medium_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
 
-${HADOOP_HOME}/bin/hadoop pipes -input $INDIR -output $OUTDIR -inputformat org.apache.hadoop.mapred.KeyValueTextInputFormat -program ${GRID_MIX_PROG}/pipes-sort -reduces $NUM_OF_REDUCERS -jobconf mapreduce.job.output.key.class=org.apache.hadoop.io.Text,mapreduce.job.output.value.class=org.apache.hadoop.io.Text -writer org.apache.hadoop.mapred.TextOutputFormat
+${HADOOP_PREFIX}/bin/hadoop pipes -input $INDIR -output $OUTDIR -inputformat org.apache.hadoop.mapred.KeyValueTextInputFormat -program ${GRID_MIX_PROG}/pipes-sort -reduces $NUM_OF_REDUCERS -jobconf mapreduce.job.output.key.class=org.apache.hadoop.io.Text,mapreduce.job.output.value.class=org.apache.hadoop.io.Text -writer org.apache.hadoop.mapred.TextOutputFormat
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.small
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.small?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.small (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.small Fri May 27 17:03:23 2011
@@ -20,8 +20,8 @@ INDIR="${VARINFLTEXT}/{part-00000,part-0
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/pipe-out-dir-small_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
 
-${HADOOP_HOME}/bin/hadoop pipes -input $INDIR -output $OUTDIR -inputformat org.apache.hadoop.mapred.KeyValueTextInputFormat -program ${GRID_MIX_PROG}/pipes-sort -reduces $NUM_OF_REDUCERS -jobconf mapreduce.job.output.key.class=org.apache.hadoop.io.Text,mapreduce.job.output.value.class=org.apache.hadoop.io.Text -writer org.apache.hadoop.mapred.TextOutputFormat
+${HADOOP_PREFIX}/bin/hadoop pipes -input $INDIR -output $OUTDIR -inputformat org.apache.hadoop.mapred.KeyValueTextInputFormat -program ${GRID_MIX_PROG}/pipes-sort -reduces $NUM_OF_REDUCERS -jobconf mapreduce.job.output.key.class=org.apache.hadoop.io.Text,mapreduce.job.output.value.class=org.apache.hadoop.io.Text -writer org.apache.hadoop.mapred.TextOutputFormat
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.large
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.large?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.large (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.large Fri May 27 17:03:23 2011
@@ -20,8 +20,8 @@ export INDIR=${VARINFLTEXT}
 Date=`date +%F-%H-%M-%S-%N`
 
 export OUTDIR=perf-out/stream-out-dir-large_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
 
-${HADOOP_HOME}/bin/hadoop jar ${STREAM_JAR} -input $INDIR -output $OUTDIR -mapper cat -reducer cat -numReduceTasks $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar ${STREAM_JAR} -input $INDIR -output $OUTDIR -mapper cat -reducer cat -numReduceTasks $NUM_OF_REDUCERS
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.medium
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.medium?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.medium (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.medium Fri May 27 17:03:23 2011
@@ -20,8 +20,8 @@ INDIR="${VARINFLTEXT}/{part-000*0,part-0
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/stream-out-dir-medium_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
 
-${HADOOP_HOME}/bin/hadoop jar ${STREAM_JAR} -input $INDIR -output $OUTDIR -mapper cat -reducer cat -numReduceTasks $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar ${STREAM_JAR} -input $INDIR -output $OUTDIR -mapper cat -reducer cat -numReduceTasks $NUM_OF_REDUCERS
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.small
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.small?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.small (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/streamsort/text-sort.small Fri May 27 17:03:23 2011
@@ -20,8 +20,8 @@ INDIR="${VARINFLTEXT}/{part-00000,part-0
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/stream-out-dir-small_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
 
-${HADOOP_HOME}/bin/hadoop jar ${STREAM_JAR} -input $INDIR -output $OUTDIR -mapper cat -reducer cat -numReduceTasks $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar ${STREAM_JAR} -input $INDIR -output $OUTDIR -mapper cat -reducer cat -numReduceTasks $NUM_OF_REDUCERS
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.large
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.large?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.large (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.large Fri May 27 17:03:23 2011
@@ -20,6 +20,6 @@ INDIR=${VARCOMPSEQ}
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/webdata-scan-out-dir-large_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 0.2 -keepred 5 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 0.2 -keepred 5 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.medium
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.medium?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.medium (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.medium Fri May 27 17:03:23 2011
@@ -20,6 +20,6 @@ INDIR="${VARCOMPSEQ}/{part-000*0,part-00
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/webdata-scan-out-dir-medium_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar ${APP_JAR} loadgen -keepmap 1 -keepred 5 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar ${APP_JAR} loadgen -keepmap 1 -keepred 5 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.small
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.small?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.small (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatascan/webdata_scan.small Fri May 27 17:03:23 2011
@@ -20,6 +20,6 @@ INDIR="${VARCOMPSEQ}/{part-00000,part-00
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/webdata-scan-out-dir-small_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 1 -keepred 5 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 1 -keepred 5 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.large
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.large?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.large (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.large Fri May 27 17:03:23 2011
@@ -20,8 +20,8 @@ INDIR=${VARCOMPSEQ}/{part-000*0,part-000
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/webdata-sort-out-dir-large_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 100 -keepred 100 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 100 -keepred 100 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
 
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.medium
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.medium?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.medium (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.medium Fri May 27 17:03:23 2011
@@ -20,8 +20,8 @@ INDIR="${VARCOMPSEQ}/{part-0000,part-000
 Date=`date +%F-%H-%M-%S-%N`
 
 OUTDIR=perf-out/webdata-sort-out-dir-medium_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 100 -keepred 100 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 100 -keepred 100 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
 
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.small
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.small?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.small (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/webdatasort/webdata_sort.small Fri May 27 17:03:23 2011
@@ -20,8 +20,8 @@ INDIR=${VARCOMPSEQ}/part-00000
 Date=`date +%F-%H-%M-%S-%N`
 
 export OUTDIR=perf-out/webdata-sort-out-dir-small_$Date
-${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
+${HADOOP_PREFIX}/bin/hadoop dfs -rmr $OUTDIR
 
-${HADOOP_HOME}/bin/hadoop jar $APP_JAR loadgen -keepmap 100 -keepred 100 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
+${HADOOP_PREFIX}/bin/hadoop jar $APP_JAR loadgen -keepmap 100 -keepred 100 -inFormat org.apache.hadoop.mapred.SequenceFileInputFormat -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat -outKey org.apache.hadoop.io.Text -outValue org.apache.hadoop.io.Text -indir $INDIR -outdir $OUTDIR -r $NUM_OF_REDUCERS
 
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix2/README.gridmix2
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix2/README.gridmix2?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix2/README.gridmix2 (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix2/README.gridmix2 Fri May 27 17:03:23 2011
@@ -96,7 +96,7 @@ copy gridmix.jar to gridmix dir.
 
 One must modify gridmix-env-2 to set the following variables:
 
-HADOOP_HOME     The hadoop install location
+HADOOP_PREFIX   The hadoop install location
 HADOOP_VERSION  The exact hadoop version to be used. e.g. hadoop-0.18.2-dev
 HADOOP_CONF_DIR The dir containing the hadoop-site.xml for the cluster to be used.
 USE_REAL_DATA   A large data-set will be created and used by the benchmark if it is set to true.

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix2/generateGridmix2data.sh
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix2/generateGridmix2data.sh?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix2/generateGridmix2data.sh (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix2/generateGridmix2data.sh Fri May 27 17:03:23 2011
@@ -51,7 +51,7 @@ export VARINFLTEXT=${GRID_MIX_DATA}/Sort
 # Fixed length key, value compressed Text File
 export FIXCOMPTEXT=${GRID_MIX_DATA}/EntropySimulationCompressed
 
-${HADOOP_HOME}/bin/hadoop jar \
+${HADOOP_PREFIX}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
   -D mapreduce.randomtextwriter.totalbytes=${COMPRESSED_DATA_BYTES} \
   -D mapreduce.randomtextwriter.bytespermap=$((${COMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
@@ -65,7 +65,7 @@ ${HADOOP_HOME}/bin/hadoop jar \
   ${VARCOMPSEQ} &
 
 
-${HADOOP_HOME}/bin/hadoop jar \
+${HADOOP_PREFIX}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
   -D mapreduce.randomtextwriter.totalbytes=${COMPRESSED_DATA_BYTES} \
   -D mapreduce.randomtextwriter.bytespermap=$((${COMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
@@ -79,7 +79,7 @@ ${HADOOP_HOME}/bin/hadoop jar \
   ${FIXCOMPSEQ} &
 
 
-${HADOOP_HOME}/bin/hadoop jar \
+${HADOOP_PREFIX}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
   -D mapreduce.randomtextwriter.totalbytes=${UNCOMPRESSED_DATA_BYTES} \
   -D mapreduce.randomtextwriter.bytespermap=$((${UNCOMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix2/gridmix-env-2
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix2/gridmix-env-2?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix2/gridmix-env-2 (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix2/gridmix-env-2 Fri May 27 17:03:23 2011
@@ -23,13 +23,13 @@
 ## Environment configuration
 # Hadoop installation
 export HADOOP_VERSION=hadoop-0.18.2-dev
-export HADOOP_HOME=${HADOOP_INSTALL_HOME}/${HADOOP_VERSION}
+export HADOOP_PREFIX=${HADOOP_INSTALL_HOME}/${HADOOP_VERSION}
 export HADOOP_CONF_DIR=
 export USE_REAL_DATASET=TRUE
 
-export APP_JAR=${HADOOP_HOME}/${HADOOP_VERSION}-test.jar
-export EXAMPLE_JAR=${HADOOP_HOME}/${HADOOP_VERSION}-examples.jar
-export STREAMING_JAR=${HADOOP_HOME}/contrib/streaming/${HADOOP_VERSION}-streaming.jar
+export APP_JAR=${HADOOP_PREFIX}/${HADOOP_VERSION}-test.jar
+export EXAMPLE_JAR=${HADOOP_PREFIX}/${HADOOP_VERSION}-examples.jar
+export STREAMING_JAR=${HADOOP_PREFIX}/contrib/streaming/${HADOOP_VERSION}-streaming.jar
 
 
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix2/rungridmix_2
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix2/rungridmix_2?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix2/rungridmix_2 (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix2/rungridmix_2 Fri May 27 17:03:23 2011
@@ -30,7 +30,7 @@ echo $Date >  $1_start.out
 
 export HADOOP_CLASSPATH=${APP_JAR}:${EXAMPLE_JAR}:${STREAMING_JAR}
 export LIBJARS=${APP_JAR},${EXAMPLE_JAR},${STREAMING_JAR}
-${HADOOP_HOME}/bin/hadoop jar gridmix.jar org.apache.hadoop.mapreduce.GridMixRunner -libjars ${LIBJARS}
+${HADOOP_PREFIX}/bin/hadoop jar gridmix.jar org.apache.hadoop.mapreduce.GridMixRunner -libjars ${LIBJARS}
 
 Date=`date +%F-%H-%M-%S-%N`
 echo $Date >  $1_end.out

Modified: hadoop/mapreduce/trunk/src/c++/librecordio/test/Makefile
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/c%2B%2B/librecordio/test/Makefile?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/c++/librecordio/test/Makefile (original)
+++ hadoop/mapreduce/trunk/src/c++/librecordio/test/Makefile Fri May 27 17:03:23 2011
@@ -37,7 +37,7 @@ ${LIBRECORDIO_TEST_DIR}/test.jr.o: test.
 	g++ ${COPTS} -c -I..  -o ${LIBRECORDIO_TEST_DIR}/test.jr.o test.jr.cc
 
 %.jr.cc %.jr.hh: %.jr
-	${HADOOP_HOME}/bin/rcc --language c++ $<
+	${HADOOP_PREFIX}/bin/rcc --language c++ $<
 
 %: %.o
 %: %.cc

Modified: hadoop/mapreduce/trunk/src/c++/pipes/debug/pipes-default-script
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/c%2B%2B/pipes/debug/pipes-default-script?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/c++/pipes/debug/pipes-default-script (original)
+++ hadoop/mapreduce/trunk/src/c++/pipes/debug/pipes-default-script Fri May 27 17:03:23 2011
@@ -1,3 +1,3 @@
 core=`find . -name 'core*'`
 #Only pipes programs have 5th argument as program name.
-gdb -quiet $5 -c $core -x $HADOOP_HOME/src/c++/pipes/debug/pipes-default-gdb-commands.txt 
+gdb -quiet $5 -c $core -x $HADOOP_PREFIX/src/c++/pipes/debug/pipes-default-gdb-commands.txt 

Modified: hadoop/mapreduce/trunk/src/contrib/block_forensics/client/BlockForensics.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/block_forensics/client/BlockForensics.java?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/block_forensics/client/BlockForensics.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/block_forensics/client/BlockForensics.java Fri May 27 17:03:23 2011
@@ -72,7 +72,7 @@ public class BlockForensics {
   // runs hadoop command and prints output to stdout
   public static void runHadoopCmd(String ... args)
   throws IOException {
-    String hadoop_home = System.getenv("HADOOP_HOME");
+    String hadoop_home = System.getenv("HADOOP_PREFIX");
     
     List<String> l = new LinkedList<String>();
     l.add("bin/hadoop");
@@ -103,8 +103,8 @@ public class BlockForensics {
     throws SAXException, ParserConfigurationException, 
            InterruptedException, IOException {
 
-    if (System.getenv("HADOOP_HOME") == null) {
-      System.err.println("The environmental variable HADOOP_HOME is undefined");
+    if (System.getenv("HADOOP_PREFIX") == null) {
+      System.err.println("The environmental variable HADOOP_PREFIX is undefined");
       System.exit(1);
     }
 

Modified: hadoop/mapreduce/trunk/src/contrib/data_join/src/examples/org/apache/hadoop/contrib/utils/join/README.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/data_join/src/examples/org/apache/hadoop/contrib/utils/join/README.txt?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/data_join/src/examples/org/apache/hadoop/contrib/utils/join/README.txt (original)
+++ hadoop/mapreduce/trunk/src/contrib/data_join/src/examples/org/apache/hadoop/contrib/utils/join/README.txt Fri May 27 17:03:23 2011
@@ -20,7 +20,7 @@ B.a31   B.a32
 *****************************
 *** Invoke SampleDataJoin ***
 *****************************
-[:~]$ $HADOOP_HOME/bin/hadoop jar hadoop-datajoin-examples.jar org.apache.hadoop.contrib.utils.join.DataJoinJob datajoin/input datajoin/output Text 1 org.apache.hadoop.contrib.utils.join.SampleDataJoinMapper org.apache.hadoop.contrib.utils.join.SampleDataJoinReducer org.apache.hadoop.contrib.utils.join.SampleTaggedMapOutput Text
+[:~]$ $HADOOP_PREFIX/bin/hadoop jar hadoop-datajoin-examples.jar org.apache.hadoop.contrib.utils.join.DataJoinJob datajoin/input datajoin/output Text 1 org.apache.hadoop.contrib.utils.join.SampleDataJoinMapper org.apache.hadoop.contrib.utils.join.SampleDataJoinReducer org.apache.hadoop.contrib.utils.join.SampleTaggedMapOutput Text
 Using TextInputFormat: Text
 Using TextOutputFormat: Text
 07/06/01 19:58:23 INFO mapred.FileInputFormat: Total input paths to process : 2

Modified: hadoop/mapreduce/trunk/src/contrib/fairscheduler/README
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/fairscheduler/README?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/fairscheduler/README (original)
+++ hadoop/mapreduce/trunk/src/contrib/fairscheduler/README Fri May 27 17:03:23 2011
@@ -20,10 +20,10 @@ support for guaranteed shares and job li
 
 The functionality of this scheduler is described in the Forrest 
 documentation at http://hadoop.apache.org/core/ or alternatively, in the 
-hadoop release it can be found at $HADOOP_HOME/docs. In order to build the 
+hadoop release it can be found at $HADOOP_PREFIX/docs. In order to build the 
 documentation on your own from source please use the following command in 
 the downloaded source folder:
 
 ant docs -Dforrest.home=path to forrest -Djava5.home= path to jdk5. 
 
-The documentation so built would be under $HADOOP_HOME/build/docs
+The documentation so built would be under $HADOOP_PREFIX/build/docs

Modified: hadoop/mapreduce/trunk/src/contrib/mrunit/src/java/org/apache/hadoop/mrunit/package.html
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/mrunit/src/java/org/apache/hadoop/mrunit/package.html?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/mrunit/src/java/org/apache/hadoop/mrunit/package.html (original)
+++ hadoop/mapreduce/trunk/src/contrib/mrunit/src/java/org/apache/hadoop/mrunit/package.html Fri May 27 17:03:23 2011
@@ -40,7 +40,7 @@ and Reducer implementations.</p></div>
 </div>
 <h2 id="_getting_started_with_mrunit">Getting Started with MRUnit</h2>
 <div class="sectionbody">
-<div class="paragraph"><p>MRUnit is compiled as a jar and resides in <tt>$HADOOP_HOME/contrib/mrunit</tt>.
+<div class="paragraph"><p>MRUnit is compiled as a jar and resides in <tt>$HADOOP_PREFIX/contrib/mrunit</tt>.
 MRUnit is designed to augment an existing unit test framework such as JUnit.</p></div>
 <div class="paragraph"><p>To use MRUnit, add the MRUnit JAR from the above path to the classpath or
 project build path in your development environment (ant buildfile, Eclipse

Modified: hadoop/mapreduce/trunk/src/contrib/mumak/bin/mumak.sh
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/mumak/bin/mumak.sh?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/mumak/bin/mumak.sh (original)
+++ hadoop/mapreduce/trunk/src/contrib/mumak/bin/mumak.sh Fri May 27 17:03:23 2011
@@ -38,13 +38,13 @@ this="$bin/$script"
 
 MUMAK_HOME=`dirname $bin`
 if [ -d "$MUMAK_HOME/../../../build/classes" ]; then
-  HADOOP_HOME=`cd $MUMAK_HOME/../../.. ; pwd`
+  HADOOP_PREFIX=`cd $MUMAK_HOME/../../.. ; pwd`
   IN_RELEASE=0
 else
-  HADOOP_HOME=`cd $MUMAK_HOME/../.. ; pwd`
+  HADOOP_PREFIX=`cd $MUMAK_HOME/../.. ; pwd`
   IN_RELEASE=1
   
-  MAPRED_JAR=$HADOOP_HOME/hadoop-mapred-${HADOOP_VERSION}.jar
+  MAPRED_JAR=$HADOOP_PREFIX/hadoop-mapred-${HADOOP_VERSION}.jar
   if [ ! -e $MAPRED_JAR ]; then
     echo "Error: Cannot find $MAPRED_JAR."
     exit 1
@@ -64,15 +64,15 @@ then
 fi
 
 # Allow alternate conf dir location.
-HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
+HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_PREFIX/conf}"
 
 if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"
 fi
 
-# Define HADOOP_COMMON_HOME
+# Define HADOOP_PREFIX
 if [ "$HADOP_CORE_HOME" = "" ]; then
-  HADOOP_COMMON_HOME=$HADOOP_HOME
+  HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_HOME}
 fi
 
 if [ "$JAVA_HOME" = "" ]; then
@@ -99,30 +99,30 @@ JAVA_HEAP_MAX=-Xmx1200m 
 CLASSPATH=${MUMAK_HOME}/conf:${HADOOP_CONF_DIR}:$JAVA_HOME/lib/tools.jar
 
 if [ $IN_RELEASE = 0 ]; then
-  CLASSPATH=${CLASSPATH}:${HADOOP_HOME}/build/contrib/${project}/classes
-  CLASSPATH=${CLASSPATH}:${HADOOP_HOME}/build/classes
-  CLASSPATH=${CLASSPATH}:${HADOOP_HOME}/build
-  CLASSPATH=${CLASSPATH}:${HADOOP_HOME}/build/tools
+  CLASSPATH=${CLASSPATH}:${HADOOP_PREFIX}/build/contrib/${project}/classes
+  CLASSPATH=${CLASSPATH}:${HADOOP_PREFIX}/build/classes
+  CLASSPATH=${CLASSPATH}:${HADOOP_PREFIX}/build
+  CLASSPATH=${CLASSPATH}:${HADOOP_PREFIX}/build/tools
   # add libs to CLASSPATH
-  for f in $HADOOP_HOME/lib/hadoop-core-*.jar; do
+  for f in $HADOOP_PREFIX/lib/hadoop-core-*.jar; do
     CLASSPATH=${CLASSPATH}:$f;
   done
 
-  for f in $HADOOP_HOME/build/ivy/lib/${project}/common/*.jar; do
+  for f in $HADOOP_PREFIX/build/ivy/lib/${project}/common/*.jar; do
     CLASSPATH=${CLASSPATH}:$f;
   done
 
-  for f in $HADOOP_HOME/build/ivy/lib/${project}/test/*.jar; do
+  for f in $HADOOP_PREFIX/build/ivy/lib/${project}/test/*.jar; do
     CLASSPATH=${CLASSPATH}:$f;
   done
 else
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME;
-  for f in $HADOOP_HOME/lib/*.jar; do
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX;
+  for f in $HADOOP_PREFIX/lib/*.jar; do
     CLASSPATH=${CLASSPATH}:$f;
   done
   CLASSPATH=${CLASSPATH}:$MUMAK_HOME/hadoop-${HADOOP_VERSION}-${project}.jar
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/hadoop-mapred-${HADOOP_VERSION}.jar
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/hadoop-mapred-tools-${HADOOP_VERSION}.jar
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/hadoop-mapred-${HADOOP_VERSION}.jar
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/hadoop-mapred-tools-${HADOOP_VERSION}.jar
 fi
 
 # check envvars which might override default args
@@ -134,7 +134,7 @@ fi
 
 # default log directory & file
 if [ "$HADOOP_LOG_DIR" = "" ]; then
-  HADOOP_LOG_DIR="$HADOOP_HOME/logs"
+  HADOOP_LOG_DIR="$HADOOP_PREFIX/logs"
 fi
 
 # default policy file for service-level authorization
@@ -144,18 +144,18 @@ fi
 
 # setup 'java.library.path' for native-hadoop code if necessary
 JAVA_LIBRARY_PATH=''
-if [ -d "${HADOOP_COMMON_HOME}/build/native" -o -d "${HADOOP_COMMON_HOME}/lib/native" ]; then
+if [ -d "${HADOOP_PREFIX}/build/native" -o -d "${HADOOP_PREFIX}/lib/native" ]; then
   JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} -Xmx32m org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
   
-  if [ -d "$HADOOP_COMMON_HOME/build/native" ]; then
-    JAVA_LIBRARY_PATH=${HADOOP_COMMON_HOME}/build/native/${JAVA_PLATFORM}/lib
+  if [ -d "$HADOOP_PREFIX/build/native" ]; then
+    JAVA_LIBRARY_PATH=${HADOOP_PREFIX}/build/native/${JAVA_PLATFORM}/lib
   fi
   
-  if [ -d "${HADOOP_COMMON_HOME}/lib/native" ]; then
+  if [ -d "${HADOOP_PREFIX}/lib/native" ]; then
     if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
-      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_COMMON_HOME}/lib/native/${JAVA_PLATFORM}
+      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_PREFIX}/lib/native/${JAVA_PLATFORM}
     else
-      JAVA_LIBRARY_PATH=${HADOOP_COMMON_HOME}/lib/native/${JAVA_PLATFORM}
+      JAVA_LIBRARY_PATH=${HADOOP_PREFIX}/lib/native/${JAVA_PLATFORM}
     fi
   fi
 fi

Modified: hadoop/mapreduce/trunk/src/contrib/raid/README
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/raid/README?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/raid/README (original)
+++ hadoop/mapreduce/trunk/src/contrib/raid/README Fri May 27 17:03:23 2011
@@ -34,7 +34,7 @@ results in saving 25% to 30% of storage 
 
 BUILDING:
 
-In HADOOP_HOME, run ant package to build Hadoop and its contrib packages.
+In HADOOP_PREFIX, run ant package to build Hadoop and its contrib packages.
 
 --------------------------------------------------------------------------------
 
@@ -43,7 +43,7 @@ INSTALLING and CONFIGURING:
 The entire code is packaged in the form of a single jar file hadoop-*-raid.jar.
 To use HDFS Raid, you need to put the above mentioned jar file on
 the CLASSPATH. The easiest way is to copy the hadoop-*-raid.jar
-from HADOOP_HOME/build/contrib/raid to HADOOP_HOME/lib. Alternatively
+from HADOOP_PREFIX/build/contrib/raid to HADOOP_PREFIX/lib. Alternatively
 you can modify HADOOP_CLASSPATH to include this jar, in conf/hadoop-env.sh.
 
 There is a single configuration file named raid.xml that describes the HDFS 
@@ -150,15 +150,15 @@ reload within seconds and the new conten
 Designate one machine in your cluster to run the RaidNode software. You can run this daemon
 on any machine irrespective of whether that machine is running any other hadoop daemon or not. 
 You can start the RaidNode by running the following on the selected machine:
-nohup $HADOOP_HOME/bin/hadoop org.apache.hadoop.raid.RaidNode >> /xxx/logs/hadoop-root-raidnode-hadoop.xxx.com.log &
+nohup $HADOOP_PREFIX/bin/hadoop org.apache.hadoop.raid.RaidNode >> /xxx/logs/hadoop-root-raidnode-hadoop.xxx.com.log &
 
 Optionally, we provide two scripts to start and stop the RaidNode. Copy the scripts
-start-raidnode.sh and stop-raidnode.sh to the directory $HADOOP_HOME/bin in the machine
+start-raidnode.sh and stop-raidnode.sh to the directory $HADOOP_PREFIX/bin in the machine
 you would like to deploy the daemon. You can start or stop the RaidNode by directly 
 callying the scripts from that machine. If you want to deploy the RaidNode remotely,
-copy start-raidnode-remote.sh and stop-raidnode-remote.sh to $HADOOP_HOME/bin at 
+copy start-raidnode-remote.sh and stop-raidnode-remote.sh to $HADOOP_PREFIX/bin at 
 the machine from which you want to trigger the remote deployment and create a text
-file $HADOOP_HOME/conf/raidnode at the same machine containing the name of the server
+file $HADOOP_PREFIX/conf/raidnode at the same machine containing the name of the server
 where the RaidNode should run. These scripts run ssh to the specified machine and 
 invoke start/stop-raidnode.sh there. As an example, you might want to change
 start-mapred.sh in the JobTracker machine so that it automatically calls 
@@ -166,7 +166,7 @@ start-raidnode-remote.sh (and do the equ
 stop-raidnode-remote.sh).
 
 To validate the integrity of a file system, run RaidFSCK as follows:
-$HADOOP_HOME/bin/hadoop org.apache.hadoop.raid.RaidShell -fsck [path]
+$HADOOP_PREFIX/bin/hadoop org.apache.hadoop.raid.RaidShell -fsck [path]
 
 This will print a list of corrupt files (i.e., files which have lost too many
 blocks and can no longer be fixed by Raid).

Modified: hadoop/mapreduce/trunk/src/contrib/raid/bin/start-raidnode-remote.sh
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/raid/bin/start-raidnode-remote.sh?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/raid/bin/start-raidnode-remote.sh (original)
+++ hadoop/mapreduce/trunk/src/contrib/raid/bin/start-raidnode-remote.sh Fri May 27 17:03:23 2011
@@ -35,7 +35,7 @@ fi
 if [ -f "${HADOOP_CONF_DIR}/raidnode" ]; then
   export HADOOP_SLAVES="${HADOOP_CONF_DIR}/raidnode"
   echo "Starting raidnode at "`cat ${HADOOP_SLAVES}`
-  "$bin"/slaves.sh --config $HADOOP_CONF_DIR cd "$HADOOP_HOME" \; "$bin/start-raidnode.sh"
+  "$bin"/slaves.sh --config $HADOOP_CONF_DIR cd "$HADOOP_PREFIX" \; "$bin/start-raidnode.sh"
 else
   echo "No raidnode file in ${HADOOP_CONF_DIR}/raidnode"
 fi

Modified: hadoop/mapreduce/trunk/src/contrib/raid/bin/stop-raidnode-remote.sh
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/raid/bin/stop-raidnode-remote.sh?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/raid/bin/stop-raidnode-remote.sh (original)
+++ hadoop/mapreduce/trunk/src/contrib/raid/bin/stop-raidnode-remote.sh Fri May 27 17:03:23 2011
@@ -34,7 +34,7 @@ fi
 if [ -f "${HADOOP_CONF_DIR}/raidnode" ]; then
   export HADOOP_SLAVES="${HADOOP_CONF_DIR}/raidnode"
   echo "Stopping raidnode at "`cat ${HADOOP_SLAVES}`
-  "$bin"/slaves.sh --config $HADOOP_CONF_DIR cd "$HADOOP_HOME" \; "$bin/stop-raidnode.sh"
+  "$bin"/slaves.sh --config $HADOOP_CONF_DIR cd "$HADOOP_PREFIX" \; "$bin/stop-raidnode.sh"
 else
   echo "No raidnode file in ${HADOOP_CONF_DIR}/raidnode"
 fi

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/DumpTypedBytes.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/DumpTypedBytes.java?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/DumpTypedBytes.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/DumpTypedBytes.java Fri May 27 17:03:23 2011
@@ -91,7 +91,7 @@ public class DumpTypedBytes implements T
   }
 
   private void printUsage() {
-    System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar"
+    System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar"
         + " dumptb <glob-pattern>");
     System.out.println("  Dumps all files that match the given pattern to " +
         "standard output as typed bytes.");

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/HadoopStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/HadoopStreaming.java?rev=1128394&r1=1128393&r2=1128394&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/HadoopStreaming.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/HadoopStreaming.java Fri May 27 17:03:23 2011
@@ -56,7 +56,7 @@ public class HadoopStreaming {
   }
   
   private static void printUsage() {
-    System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar"
+    System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar"
         + " [options]");
     System.out.println("Options:");
     System.out.println("  dumptb <glob-pattern> Dumps all files that match the" 



Mime
View raw message