hadoop-mapreduce-commits mailing list archives

From: omal...@apache.org
Subject: svn commit: r834556 - in /hadoop/mapreduce/branches/HDFS-641: ./ conf/ src/c++/ src/c++/libhdfs/ src/contrib/ src/contrib/capacity-scheduler/ src/contrib/data_join/ src/contrib/dynamic-scheduler/ src/contrib/eclipse-plugin/ src/contrib/fairscheduler/ s...
Date: Tue, 10 Nov 2009 17:25:50 GMT
Author: omalley
Date: Tue Nov 10 17:25:49 2009
New Revision: 834556

URL: http://svn.apache.org/viewvc?rev=834556&view=rev
Log:
catching up to trunk

Added:
    hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingRecordReader.java
      - copied unchanged from r834555, hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingRecordReader.java
    hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestJobOutputCommitter.java
      - copied unchanged from r834555, hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestJobOutputCommitter.java
Modified:
    hadoop/mapreduce/branches/HDFS-641/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/.gitignore   (props changed)
    hadoop/mapreduce/branches/HDFS-641/CHANGES.txt
    hadoop/mapreduce/branches/HDFS-641/build.xml
    hadoop/mapreduce/branches/HDFS-641/conf/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/conf/capacity-scheduler.xml.template   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/c++/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/c++/libhdfs/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/c++/libhdfs/hdfsJniHelper.h
    hadoop/mapreduce/branches/HDFS-641/src/contrib/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/contrib/build-contrib.xml   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/contrib/build.xml   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/contrib/capacity-scheduler/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/contrib/data_join/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/contrib/dynamic-scheduler/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/contrib/eclipse-plugin/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/contrib/fairscheduler/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java
    hadoop/mapreduce/branches/HDFS-641/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java
    hadoop/mapreduce/branches/HDFS-641/src/contrib/index/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/java/org/apache/hadoop/mrunit/mapreduce/mock/MockReduceContext.java
    hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/test/org/apache/hadoop/mrunit/TestReduceDriver.java
    hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/test/org/apache/hadoop/mrunit/mapreduce/TestReduceDriver.java
    hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/build.xml
    hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/contrib/vaidya/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/docs/src/documentation/content/xdocs/fair_scheduler.xml
    hadoop/mapreduce/branches/HDFS-641/src/examples/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/java/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/FileOutputCommitter.java
    hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobTracker.java
    hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingInputFormat.java
    hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java
    hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java
    hadoop/mapreduce/branches/HDFS-641/src/test/mapred/   (props changed)
    hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapred/TestJobCleanup.java
    hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMultipleInputs.java
    hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapreduce/util/TestProcfsBasedProcessTree.java
    hadoop/mapreduce/branches/HDFS-641/src/webapps/job/   (props changed)

Propchange: hadoop/mapreduce/branches/HDFS-641/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,2 +1,2 @@
 /hadoop/core/branches/branch-0.19/mapred:713112
-/hadoop/mapreduce/trunk:817878-832891
+/hadoop/mapreduce/trunk:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/.gitignore
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/HADOOP-4687/mapred/.gitignore:776175-784965
 /hadoop/core/branches/branch-0.19/mapred/.gitignore:713112
 /hadoop/core/trunk/.gitignore:784664-785643
-/hadoop/mapreduce/trunk/.gitignore:817878-832891
+/hadoop/mapreduce/trunk/.gitignore:817878-834555

Modified: hadoop/mapreduce/branches/HDFS-641/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/CHANGES.txt?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/CHANGES.txt (original)
+++ hadoop/mapreduce/branches/HDFS-641/CHANGES.txt Tue Nov 10 17:25:49 2009
@@ -11,6 +11,9 @@
 
   IMPROVEMENTS
 
+    MAPREDUCE-707. Provide a jobconf property for explicitly assigning a job to 
+    a pool in the Fair Scheduler. (Alan Heirich via matei)
+
     MAPREDUCE-999. Improve Sqoop test speed and refactor tests.
     (Aaron Kimball via tomwhite)
 
@@ -66,6 +69,14 @@
     MAPREDUCE-1153. Fix tasktracker metrics when trackers are decommissioned.
     (sharad)
 
+    MAPREDUCE-1128. Fix MRUnit to prohibit iterating over values twice. (Aaron
+    Kimball via cdouglas)
+
+    MAPREDUCE-665. Move libhdfs to HDFS subproject. (Eli Collins via dhruba)
+
+    MAPREDUCE-1196. Fix FileOutputCommitter to use the deprecated cleanupJob
+    api correctly. (acmurthy)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES
@@ -850,3 +861,19 @@
 
     MAPREDUCE-1038. Weave Mumak aspects only if related files have changed.
     (Aaron Kimball via cdouglas)
+
+    MAPREDUCE-1037. Continue running contrib tests if Sqoop tests fail. (Aaron
+    Kimball via cdouglas)
+
+    MAPREDUCE-1163. Remove unused, hard-coded paths from libhdfs. (Allen
+    Wittenauer via cdouglas)
+
+    MAPREDUCE-962. Fix a NullPointerException while killing task process 
+    trees. (Ravi Gummadi via yhemanth)
+
+    MAPREDUCE-1177. Correct setup/cleanup inversion in
+    JobTracker::getTaskReports. (Vinod Kumar Vavilapalli via cdouglas)
+
+    MAPREDUCE-1178. Fix ClassCastException in MultipleInputs by adding 
+    a DelegatingRecordReader. (Amareshwari Sriramadasu and Jay Booth 
+    via sharad)

Modified: hadoop/mapreduce/branches/HDFS-641/build.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/build.xml?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/build.xml (original)
+++ hadoop/mapreduce/branches/HDFS-641/build.xml Tue Nov 10 17:25:49 2009
@@ -48,7 +48,6 @@
   <property name="c++.utils.src" value="${c++.src}/utils"/>
   <property name="c++.pipes.src" value="${c++.src}/pipes"/>
   <property name="c++.examples.pipes.src" value="${examples.dir}/pipes"/>
-  <property name="c++.libhdfs.src" value="${c++.src}/libhdfs"/>
   <property name="librecordio.src" value="${c++.src}/librecordio"/>
   <property name="tools.src" value="${basedir}/src/tools"/>
 
@@ -73,7 +72,6 @@
   <property name="build.c++" value="${build.dir}/c++-build/${build.platform}"/>
   <property name="build.c++.utils" value="${build.c++}/utils"/>
   <property name="build.c++.pipes" value="${build.c++}/pipes"/>
-  <property name="build.c++.libhdfs" value="${build.c++}/libhdfs"/>
   <property name="build.c++.examples.pipes" 
             value="${build.c++}/examples/pipes"/>
   <property name="build.docs" value="${build.dir}/docs"/>
@@ -118,9 +116,6 @@
   <property name="test.mapred.commit.tests.file" value="${test.src.dir}/commit-tests" />
   <property name="test.mapred.all.tests.file" value="${test.src.dir}/all-tests" />
 
-  <property name="test.libhdfs.conf.dir" value="${c++.libhdfs.src}/tests/conf"/>
-  <property name="test.libhdfs.dir" value="${test.build.dir}/libhdfs"/>
-
   <property name="librecordio.test.dir" value="${test.build.dir}/librecordio"/>
   <property name="web.src.dir" value="${basedir}/src/web"/>
   <property name="src.webapps" value="${basedir}/src/webapps"/>
@@ -389,7 +384,7 @@
 
   <target name="compile-core" depends="clover, compile-mapred-classes, compile-c++" description="Compile core only"/> 
 
-  <target name="compile-contrib" depends="compile-core,tools,compile-c++-libhdfs">
+  <target name="compile-contrib" depends="compile-core,tools">
      <subant target="compile">
         <property name="version" value="${version}"/>
         <property name="hadoop-core.version" value="${hadoop-core.version}"/>
@@ -648,7 +643,7 @@
     <fail if="testsfailed">Tests failed!</fail>
   </target>
 
-  <target name="test" depends="test-c++-libhdfs, jar-test, test-core" description="Run all unit tests">
+  <target name="test" depends="jar-test, test-core" description="Run all unit tests">
     <subant target="test-contrib">
       <fileset file="${basedir}/build.xml"/>
      </subant>
@@ -1148,26 +1143,6 @@
      </subant>  	
   </target>
 	
- <target name="test-c++-libhdfs" depends="compile-c++-libhdfs, compile-core" if="islibhdfs">
-    <delete dir="${test.libhdfs.dir}"/>
-    <mkdir dir="${test.libhdfs.dir}"/>
-    <mkdir dir="${test.libhdfs.dir}/logs"/>
-    <mkdir dir="${test.libhdfs.dir}/hdfs/name"/>
-
-    <exec dir="${build.c++.libhdfs}" executable="${make.cmd}" failonerror="true">
-        <env key="OS_NAME" value="${os.name}"/>
-        <env key="OS_ARCH" value="${os.arch}"/>
-        <env key="JVM_ARCH" value="${jvm.arch}"/>
-        <env key="LIBHDFS_BUILD_DIR" value="${build.c++.libhdfs}"/>
-        <env key="HADOOP_HOME" value="${basedir}"/>
-        <env key="HADOOP_CONF_DIR" value="${test.libhdfs.conf.dir}"/>
-        <env key="HADOOP_LOG_DIR" value="${test.libhdfs.dir}/logs"/>
-        <env key="LIBHDFS_SRC_DIR" value="${c++.libhdfs.src}"/>
-        <env key="LIBHDFS_INSTALL_DIR" value="${install.c++}/lib"/>  
-        <env key="LIB_DIR" value="${common.ivy.lib.dir}"/>
-		<arg value="test"/>
-    </exec>
-  </target>
 
 <!-- ================================================================== -->
 <!-- librecordio targets.                                               -->
@@ -1220,16 +1195,8 @@
           searchpath="yes" failonerror="yes">
        <arg value="-if"/>
     </exec>
-    <antcall target="create-c++-configure-libhdfs"/>
-  </target>
+   </target>
    
-  <target name="create-c++-configure-libhdfs" depends="check-c++-libhdfs" if="islibhdfs">
-    <exec executable="autoreconf" dir="${c++.libhdfs.src}" 
-          searchpath="yes" failonerror="yes">
-       <arg value="-if"/>
-    </exec>
-  </target>
-
   <target name="check-c++-makefiles" depends="init" if="compile.c++">
     <condition property="need.c++.utils.makefile">
        <not> <available file="${build.c++.utils}/Makefile"/> </not>
@@ -1242,33 +1209,6 @@
     </condition>
   </target>
 
-  <target name="check-c++-libhdfs">
-    <condition property="islibhdfs">
-      <and>
-        <isset property="compile.c++"/>
-        <isset property="libhdfs"/>
-      </and>
-    </condition>
-  </target>
-
-  <target name="check-c++-makefile-libhdfs" depends="init,check-c++-libhdfs" if="islibhdfs">
-    <condition property="need.c++.libhdfs.makefile">
-       <not> <available file="${build.c++.libhdfs}/Makefile"/> </not>
-    </condition>
-  </target>
-
-  <target name="create-c++-libhdfs-makefile" depends="check-c++-makefile-libhdfs" 
-                                           if="need.c++.libhdfs.makefile">
-    <mkdir dir="${build.c++.libhdfs}"/>
-    <chmod file="${c++.libhdfs.src}/configure" perm="ugo+x"/>
-    <exec executable="${c++.libhdfs.src}/configure" dir="${build.c++.libhdfs}"
-          failonerror="yes">
-      <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
-      <env key="JVM_ARCH" value="${jvm.arch}"/>
-      <arg value="--prefix=${install.c++}"/>
-    </exec>
-  </target>
-
   <target name="create-c++-utils-makefile" depends="check-c++-makefiles" 
                                            if="need.c++.utils.makefile">
     <mkdir dir="${build.c++.utils}"/>
@@ -1335,15 +1275,6 @@
   <target name="compile-c++-examples" 
           depends="compile-c++-examples-pipes"/>
 
-  <target name="compile-c++-libhdfs" depends="create-c++-libhdfs-makefile" if="islibhdfs">
-    <exec executable="${make.cmd}" dir="${build.c++.libhdfs}" searchpath="yes"
-          failonerror="yes">
-      <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
-      <env key="JVM_ARCH" value="${jvm.arch}"/>
-      <arg value="install"/>
-    </exec>
-  </target>
-
  <target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover.  To use, specify -Dclover.home=&lt;base of clover installation&gt; -Drun.clover=true on the command line."/>
 
 <target name="clover.setup" if="clover.enabled">

Propchange: hadoop/mapreduce/branches/HDFS-641/conf/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/conf:713112
 /hadoop/core/trunk/conf:784664-785643
-/hadoop/mapreduce/trunk/conf:817878-832891
+/hadoop/mapreduce/trunk/conf:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/conf/capacity-scheduler.xml.template
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/conf/capacity-scheduler.xml.template:713112
 /hadoop/core/trunk/conf/capacity-scheduler.xml.template:776175-785643
-/hadoop/mapreduce/trunk/conf/capacity-scheduler.xml.template:817878-832891
+/hadoop/mapreduce/trunk/conf/capacity-scheduler.xml.template:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/src/c++/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/c++:713112
 /hadoop/core/trunk/src/c++:776175-784663
-/hadoop/mapreduce/trunk/src/c++:817878-832891
+/hadoop/mapreduce/trunk/src/c++:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/src/c++/libhdfs/
------------------------------------------------------------------------------
    svn:mergeinfo = 

Modified: hadoop/mapreduce/branches/HDFS-641/src/c++/libhdfs/hdfsJniHelper.h
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/c%2B%2B/libhdfs/hdfsJniHelper.h?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/c++/libhdfs/hdfsJniHelper.h (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/c++/libhdfs/hdfsJniHelper.h Tue Nov 10 17:25:49 2009
@@ -30,8 +30,6 @@
 
 #define PATH_SEPARATOR ':'
 
-#define USER_CLASSPATH "/home/y/libexec/hadoop/conf:/home/y/libexec/hadoop/lib/hadoop-0.1.0.jar"
-
 
 /** Denote the method we want to invoke as STATIC or INSTANCE */
 typedef enum {

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib:713112
 /hadoop/core/trunk/src/contrib:784664-785643
-/hadoop/mapreduce/trunk/src/contrib:817878-832891
+/hadoop/mapreduce/trunk/src/contrib:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/build-contrib.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/build-contrib.xml:713112
 /hadoop/core/trunk/src/contrib/build-contrib.xml:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/build-contrib.xml:817878-832891
+/hadoop/mapreduce/trunk/src/contrib/build-contrib.xml:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/build.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/build.xml:713112
 /hadoop/core/trunk/src/contrib/build.xml:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/build.xml:817878-832891
+/hadoop/mapreduce/trunk/src/contrib/build.xml:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/capacity-scheduler/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/capacity-scheduler:713112
 /hadoop/core/trunk/src/contrib/capacity-scheduler:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/capacity-scheduler:817878-832891
+/hadoop/mapreduce/trunk/src/contrib/capacity-scheduler:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/data_join/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/data_join:713112
 /hadoop/core/trunk/src/contrib/data_join:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/data_join:817878-832891
+/hadoop/mapreduce/trunk/src/contrib/data_join:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/dynamic-scheduler/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/src/contrib/dynamic-scheduler:713112
 /hadoop/core/trunk/src/contrib/dynamic-scheduler:784975-786373
-/hadoop/mapreduce/trunk/src/contrib/dynamic-scheduler:817878-832891
+/hadoop/mapreduce/trunk/src/contrib/dynamic-scheduler:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/eclipse-plugin/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/core/src/contrib/eclipse-plugin:713112
 /hadoop/core/trunk/src/contrib/eclipse-plugin:776175-784663
-/hadoop/mapreduce/trunk/src/contrib/eclipse-plugin:817878-832891
+/hadoop/mapreduce/trunk/src/contrib/eclipse-plugin:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/fairscheduler/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/fairscheduler:713112
 /hadoop/core/trunk/src/contrib/fairscheduler:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/fairscheduler:817878-832891
+/hadoop/mapreduce/trunk/src/contrib/fairscheduler:817878-834555

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java Tue Nov 10 17:25:49 2009
@@ -61,6 +61,8 @@
    */
   public static final long ALLOC_RELOAD_WAIT = 5 * 1000; 
 
+  public static final String EXPLICIT_POOL_PROPERTY = "mapred.fairscheduler.pool";
+
   private final FairScheduler scheduler;
   
   // Map and reduce minimum allocations for each pool
@@ -391,7 +393,7 @@
    */
   public synchronized void setPool(JobInProgress job, String pool) {
     removeJob(job);
-    job.getJobConf().set(poolNameProperty, pool);
+    job.getJobConf().set(EXPLICIT_POOL_PROPERTY, pool);
     addJob(job);
   }
 
@@ -403,13 +405,16 @@
   }
   
   /**
-   * Get the pool name for a JobInProgress from its configuration. This uses
-   * the "project" property in the jobconf by default, or the property set with
-   * "mapred.fairscheduler.poolnameproperty".
+   * Get the pool name for a JobInProgress from its configuration.  This uses
+   * the value of mapred.fairscheduler.pool if specified, otherwise the value 
+   * of the property named in mapred.fairscheduler.poolnameproperty if that is
+   * specified.  Otherwise if neither is specified it uses the "user.name" property 
+   * in the jobconf by default.
    */
   public String getPoolName(JobInProgress job) {
     Configuration conf = job.getJobConf();
-    return conf.get(poolNameProperty, Pool.DEFAULT_POOL_NAME).trim();
+    return conf.get(EXPLICIT_POOL_PROPERTY,
+      conf.get(poolNameProperty, Pool.DEFAULT_POOL_NAME)).trim();
   }
 
   /**

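[For context: the PoolManager change above (MAPREDUCE-707) introduces the explicit mapred.fairscheduler.pool property. A minimal sketch, not part of this commit, of how a client job could use it; the class and pool names are hypothetical, while the property names and fallback order come from the javadoc above:

    import org.apache.hadoop.mapred.JobConf;

    public class ExplicitPoolExample {
      public static JobConf configure() {
        JobConf conf = new JobConf();
        // Equivalent to passing -Dmapred.fairscheduler.pool=poolA to
        // bin/hadoop jar; "poolA" is a hypothetical pool name.
        conf.set("mapred.fairscheduler.pool", "poolA");
        // If this property is unset, the scheduler falls back to the property
        // named by mapred.fairscheduler.poolnameproperty ("user.name" by
        // default), and finally to the default pool.
        return conf;
      }
    }
]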
Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java Tue Nov 10 17:25:49 2009
@@ -52,6 +52,7 @@
       "test-pools").getAbsolutePath();
   
   private static final String POOL_PROPERTY = "pool";
+  private static final String EXPLICIT_POOL_PROPERTY = "mapred.fairscheduler.pool";
   
   private static int jobCounter;
   
@@ -2471,6 +2472,87 @@
     checkAssignment("tt2", "attempt_test_0001_r_000001_0 on tt2");
     checkAssignment("tt2", "attempt_test_0004_r_000000_0 on tt2");
   }
+
+  /**
+   * This test uses the mapred.fairscheduler.pool property to assign jobs to pools.
+   */
+  public void testPoolAssignment() throws Exception {
+    // Set up pools file
+    PrintWriter out = new PrintWriter(new FileWriter(ALLOC_FILE));
+    out.println("<?xml version=\"1.0\"?>");
+    out.println("<allocations>");
+    out.println("<pool name=\"default\">");
+    out.println("<schedulingMode>fair</schedulingMode>");
+    out.println("</pool>");
+    out.println("<pool name=\"poolA\">");
+    out.println("<schedulingMode>fair</schedulingMode>");
+    out.println("</pool>");
+    out.println("</allocations>");
+    out.close();
+    scheduler.getPoolManager().reloadAllocs();
+    Pool defaultPool = scheduler.getPoolManager().getPool("default");
+    Pool poolA = scheduler.getPoolManager().getPool("poolA");
+ 
+    // Submit a job to the default pool.  All specifications take default values.
+    JobInProgress job1 = submitJob(JobStatus.RUNNING, 1, 3);
+
+    assertEquals(1,    defaultPool.getMapSchedulable().getDemand());
+    assertEquals(3,    defaultPool.getReduceSchedulable().getDemand());
+    assertEquals(0,    poolA.getMapSchedulable().getDemand());
+    assertEquals(0,    poolA.getReduceSchedulable().getDemand());
+
+    // Submit a job to the default pool and move it to poolA using setPool.
+    JobInProgress job2 = submitJob(JobStatus.RUNNING, 5, 7);
+
+    assertEquals(6,    defaultPool.getMapSchedulable().getDemand());
+    assertEquals(10,   defaultPool.getReduceSchedulable().getDemand());
+    assertEquals(0,    poolA.getMapSchedulable().getDemand());
+    assertEquals(0,    poolA.getReduceSchedulable().getDemand());
+
+    scheduler.getPoolManager().setPool(job2, "poolA");
+    assertEquals("poolA", scheduler.getPoolManager().getPoolName(job2));
+
+    defaultPool.getMapSchedulable().updateDemand();
+    defaultPool.getReduceSchedulable().updateDemand();
+    poolA.getMapSchedulable().updateDemand();
+    poolA.getReduceSchedulable().updateDemand();
+
+    assertEquals(1,    defaultPool.getMapSchedulable().getDemand());
+    assertEquals(3,    defaultPool.getReduceSchedulable().getDemand());
+    assertEquals(5,    poolA.getMapSchedulable().getDemand());
+    assertEquals(7,    poolA.getReduceSchedulable().getDemand());
+
+    // Submit a job to poolA by specifying mapred.fairscheduler.pool
+    JobConf jobConf = new JobConf(conf);
+    jobConf.setNumMapTasks(11);
+    jobConf.setNumReduceTasks(13);
+    jobConf.set(POOL_PROPERTY, "nonsense"); // test that this is overridden
+    jobConf.set(EXPLICIT_POOL_PROPERTY, "poolA");
+    JobInProgress job3 = new FakeJobInProgress(jobConf, taskTrackerManager,
+        null, UtilsForTests.getJobTracker());
+    job3.getStatus().setRunState(JobStatus.RUNNING);
+    taskTrackerManager.submitJob(job3);
+
+    assertEquals(1,    defaultPool.getMapSchedulable().getDemand());
+    assertEquals(3,    defaultPool.getReduceSchedulable().getDemand());
+    assertEquals(16,   poolA.getMapSchedulable().getDemand());
+    assertEquals(20,   poolA.getReduceSchedulable().getDemand());
+
+    // Submit a job to poolA by specifying pool and not mapred.fairscheduler.pool
+    JobConf jobConf2 = new JobConf(conf);
+    jobConf2.setNumMapTasks(17);
+    jobConf2.setNumReduceTasks(19);
+    jobConf2.set(POOL_PROPERTY, "poolA");
+    JobInProgress job4 = new FakeJobInProgress(jobConf2, taskTrackerManager,
+        null, UtilsForTests.getJobTracker());
+    job4.getStatus().setRunState(JobStatus.RUNNING);
+    taskTrackerManager.submitJob(job4);
+
+    assertEquals(1,    defaultPool.getMapSchedulable().getDemand());
+    assertEquals(3,    defaultPool.getReduceSchedulable().getDemand());
+    assertEquals(33,   poolA.getMapSchedulable().getDemand());
+    assertEquals(39,   poolA.getReduceSchedulable().getDemand());
+  }
   
   private void advanceTime(long time) {
     clock.advance(time);

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/index/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/index:713112
 /hadoop/core/trunk/src/contrib/index:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/index:817878-832891
+/hadoop/mapreduce/trunk/src/contrib/index:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/mrunit:713112
 /hadoop/core/trunk/src/contrib/mrunit:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/mrunit:817878-832891
+/hadoop/mapreduce/trunk/src/contrib/mrunit:817878-834555

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/java/org/apache/hadoop/mrunit/mapreduce/mock/MockReduceContext.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/java/org/apache/hadoop/mrunit/mapreduce/mock/MockReduceContext.java?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/java/org/apache/hadoop/mrunit/mapreduce/mock/MockReduceContext.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/java/org/apache/hadoop/mrunit/mapreduce/mock/MockReduceContext.java Tue Nov 10 17:25:49 2009
@@ -63,20 +63,41 @@
   private class InspectableIterable implements Iterable<VALUEIN> {
     private Iterable<VALUEIN> base;
     private VALUEIN lastVal;
+    private boolean used; // if true, don't re-iterate.
 
     public InspectableIterable(final Iterable<VALUEIN> baseCollection) {
       this.base = baseCollection;
     }
 
     public Iterator<VALUEIN> iterator() {
-      return new InspectableIterator(this.base.iterator());
+      if (used) {
+        return new NullIterator();
+      } else {
+        used = true;
+        return new InspectableIterator(this.base.iterator());
+      }
     }
 
     public VALUEIN getLastVal() {
       return lastVal;
     }
 
-    private class InspectableIterator 
+    private class NullIterator
+        extends ReduceContextImpl<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.ValueIterator
+        implements Iterator<VALUEIN> {
+      public VALUEIN next() {
+        return null;
+      }
+
+      public boolean hasNext() {
+        return false;
+      }
+
+      public void remove() {
+      }
+    }
+
+    private class InspectableIterator
         extends ReduceContextImpl<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.ValueIterator
         implements Iterator<VALUEIN> {
       private Iterator<VALUEIN> iter;

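[For context: with MAPREDUCE-1128, the mock context hands out the values of a reduce group only once, matching real MapReduce semantics; a second iterator is empty, as the tests below verify. A reducer that genuinely needs two passes must buffer the values itself. A minimal sketch under that assumption; the class name and the sum-minus-max computation are hypothetical:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Reducer;

    public class TwoPassReducer
        extends Reducer<Text, LongWritable, Text, LongWritable> {
      @Override
      protected void reduce(Text key, Iterable<LongWritable> values, Context ctx)
          throws IOException, InterruptedException {
        List<Long> buffered = new ArrayList<Long>();
        long sum = 0;
        for (LongWritable v : values) {      // first (and only) live iteration
          buffered.add(v.get());             // copy; Hadoop reuses the object
          sum += v.get();
        }
        long max = Long.MIN_VALUE;
        for (long v : buffered) {            // second pass over the copy
          max = Math.max(max, v);
        }
        ctx.write(key, new LongWritable(sum - max)); // hypothetical result
      }
    }
]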
Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/test/org/apache/hadoop/mrunit/TestReduceDriver.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/test/org/apache/hadoop/mrunit/TestReduceDriver.java?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/test/org/apache/hadoop/mrunit/TestReduceDriver.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/test/org/apache/hadoop/mrunit/TestReduceDriver.java Tue Nov 10 17:25:49 2009
@@ -22,13 +22,17 @@
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Iterator;
 import java.util.List;
 
 import junit.framework.TestCase;
 
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.lib.LongSumReducer;
 import org.apache.hadoop.mrunit.types.Pair;
 import org.junit.Before;
@@ -222,5 +226,45 @@
       // expected.
     }
   }
+
+  /**
+   * Reducer that counts its values twice; the second iteration
+   * according to mapreduce semantics should be empty.
+   */
+  private static class DoubleIterReducer<K, V>
+      extends MapReduceBase implements Reducer<K, V, K, LongWritable> {
+    public void reduce(K key, Iterator<V> values,
+        OutputCollector<K, LongWritable> out, Reporter r) throws IOException {
+      long count = 0;
+
+      while (values.hasNext()) {
+        count++;
+        values.next();
+      }
+
+      // This time around, iteration should yield no values.
+      while (values.hasNext()) {
+        count++;
+        values.next();
+      }
+      out.collect(key, new LongWritable(count));
+    }
+  }
+
+  @Test
+  public void testDoubleIteration() {
+    reducer = new DoubleIterReducer<Text, LongWritable>();
+    driver = new ReduceDriver<Text, LongWritable, Text, LongWritable>(
+        reducer);
+
+    driver
+        .withInputKey(new Text("foo"))
+        .withInputValue(new LongWritable(1))
+        .withInputValue(new LongWritable(1))
+        .withInputValue(new LongWritable(1))
+        .withInputValue(new LongWritable(1))
+        .withOutput(new Text("foo"), new LongWritable(4))
+        .runTest();
+  }
 }
 

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/test/org/apache/hadoop/mrunit/mapreduce/TestReduceDriver.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/test/org/apache/hadoop/mrunit/mapreduce/TestReduceDriver.java?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/test/org/apache/hadoop/mrunit/mapreduce/TestReduceDriver.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/mrunit/src/test/org/apache/hadoop/mrunit/mapreduce/TestReduceDriver.java Tue Nov 10 17:25:49 2009
@@ -223,5 +223,43 @@
       // expected.
     }
   }
+
+  /**
+   * Reducer that counts its values twice; the second iteration
+   * according to mapreduce semantics should be empty.
+   */
+  private static class DoubleIterReducer<K, V>
+      extends Reducer<K, V, K, LongWritable> {
+    public void reduce(K key, Iterable<V> values, Context c)
+        throws IOException, InterruptedException {
+      long count = 0;
+
+      for (V val : values) {
+        count++;
+      }
+
+      // This time around, iteration should yield no values.
+      for (V val : values) {
+        count++;
+      }
+      c.write(key, new LongWritable(count));
+    }
+  }
+
+  @Test
+  public void testDoubleIteration() {
+    reducer = new DoubleIterReducer<Text, LongWritable>();
+    driver = new ReduceDriver<Text, LongWritable, Text, LongWritable>(
+        reducer);
+
+    driver
+        .withInputKey(new Text("foo"))
+        .withInputValue(new LongWritable(1))
+        .withInputValue(new LongWritable(1))
+        .withInputValue(new LongWritable(1))
+        .withInputValue(new LongWritable(1))
+        .withOutput(new Text("foo"), new LongWritable(4))
+        .runTest();
+  }
 }
 

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/src/contrib/sqoop:713112
 /hadoop/core/trunk/src/contrib/sqoop:784975-786373
-/hadoop/mapreduce/trunk/src/contrib/sqoop:817878-832891
+/hadoop/mapreduce/trunk/src/contrib/sqoop:817878-834555

Modified: hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/build.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/build.xml?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/build.xml (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/contrib/sqoop/build.xml Tue Nov 10 17:25:49 2009
@@ -149,7 +149,7 @@
         <fileset dir="${src.test}" includes="**/${testcase}.java"/>
       </batchtest>
     </junit>
-    <fail if="tests.failed">Tests failed!</fail>
+    <antcall target="checkfailure"/>
   </target>
 
   <target name="doc">

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/streaming/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/streaming:713112
 /hadoop/core/trunk/src/contrib/streaming:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/streaming:817878-832891
+/hadoop/mapreduce/trunk/src/contrib/streaming:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/src/contrib/vaidya/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/vaidya:713112
 /hadoop/core/trunk/src/contrib/vaidya:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/vaidya:817878-832891
+/hadoop/mapreduce/trunk/src/contrib/vaidya:817878-834555

Modified: hadoop/mapreduce/branches/HDFS-641/src/docs/src/documentation/content/xdocs/fair_scheduler.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/docs/src/documentation/content/xdocs/fair_scheduler.xml?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/docs/src/documentation/content/xdocs/fair_scheduler.xml (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/docs/src/documentation/content/xdocs/fair_scheduler.xml Tue Nov 10 17:25:49 2009
@@ -163,6 +163,15 @@
           </tr>
           <tr>
           <td>
+            mapred.fairscheduler.pool
+          </td>
+          <td>
+            Specify the pool that a job belongs in.  
+            If this is specified then mapred.fairscheduler.poolnameproperty is ignored.
+          </td>
+          </tr>
+          <tr>
+          <td>
             mapred.fairscheduler.poolnameproperty
           </td>
           <td>
@@ -171,17 +180,8 @@
             (i.e. one pool for each user). 
             Another useful value is <em>group.name</em> to create a
             pool per Unix group.
-            Finally, a common setting is to use a non-standard property
-            such as <em>pool.name</em> as the pool name property, and make it
-            default to <em>mapreduce.job.mapreduce.job.user.name</em> through the following setting:<br/>
-            <code>&lt;property&gt;</code><br/> 
-            <code>&nbsp;&nbsp;&lt;name&gt;pool.name&lt;/name&gt;</code><br/>
-            <code>&nbsp;&nbsp;&lt;value&gt;${mapreduce.job.mapreduce.job.user.name}&lt;/value&gt;</code><br/>
-            <code>&lt;/property&gt;</code><br/>
-            This allows you to specify the pool name explicitly for some jobs
-            through the jobconf (e.g. passing <em>-Dpool.name=&lt;name&gt;</em>
-            to <em>bin/hadoop jar</em>, while having the default be the user's
-            pool.
+            mapred.fairscheduler.poolnameproperty is used only for jobs in which 
+            mapred.fairscheduler.pool is not explicitly set.
           </td>
           </tr>
           <tr>

Propchange: hadoop/mapreduce/branches/HDFS-641/src/examples/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/examples:713112
 /hadoop/core/trunk/src/examples:776175-784663
-/hadoop/mapreduce/trunk/src/examples:817878-832891
+/hadoop/mapreduce/trunk/src/examples:817878-834555

Propchange: hadoop/mapreduce/branches/HDFS-641/src/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/java:713112
 /hadoop/core/trunk/src/mapred:776175-785643
-/hadoop/mapreduce/trunk/src/java:817878-832891
+/hadoop/mapreduce/trunk/src/java:817878-834555

Modified: hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/FileOutputCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/FileOutputCommitter.java?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/FileOutputCommitter.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/FileOutputCommitter.java Tue Nov 10 17:25:49 2009
@@ -64,7 +64,7 @@
   
   public void commitJob(JobContext context) throws IOException {
     // delete the _temporary folder in the output folder
-    cleanup(context);
+    cleanupJob(context);
     // check if the output-dir marking is required
     if (shouldMarkOutputDir(context.getJobConf())) {
       // create a _success file in the output folder
@@ -85,9 +85,10 @@
       fileSys.create(filePath).close();
     }
   }
-  
-  // Deletes the _temporary folder in the job's output dir.
-  private void cleanup(JobContext context) throws IOException {
+
+  @Override
+  @Deprecated
+  public void cleanupJob(JobContext context) throws IOException {
     JobConf conf = context.getJobConf();
     // do the clean up of temporary directory
     Path outputPath = FileOutputFormat.getOutputPath(conf);
@@ -107,7 +108,7 @@
   public void abortJob(JobContext context, int runState) 
   throws IOException {
     // simply delete the _temporary dir from the o/p folder of the job
-    cleanup(context);
+    cleanupJob(context);
   }
   
   public void setupTask(TaskAttemptContext context) throws IOException {

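[For context: MAPREDUCE-1196 routes commitJob and abortJob back through the deprecated public cleanupJob hook, so committers that only override cleanupJob keep working. A minimal sketch of such a legacy override; the class and marker-file names are hypothetical, and the TestJobCleanup change below exercises the same pattern:

    import java.io.IOException;

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.FileOutputCommitter;
    import org.apache.hadoop.mapred.FileOutputFormat;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.JobContext;

    public class LegacyCleanupCommitter extends FileOutputCommitter {
      @Override
      @Deprecated
      public void cleanupJob(JobContext context) throws IOException {
        super.cleanupJob(context);            // still deletes _temporary
        JobConf conf = context.getJobConf();
        Path outputPath = FileOutputFormat.getOutputPath(conf);
        FileSystem fs = outputPath.getFileSystem(conf);
        // Hypothetical marker file; commitJob/abortJob now reach this hook.
        fs.create(new Path(outputPath, "_legacy_cleanup")).close();
      }
    }
]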
Modified: hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobTracker.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobTracker.java?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobTracker.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapred/JobTracker.java Tue Nov 10 17:25:49 2009
@@ -3414,9 +3414,9 @@
         return getMapTaskReports(JobID.downgrade(jobid));
       case REDUCE :
         return getReduceTaskReports(JobID.downgrade(jobid));
-      case JOB_SETUP:
+      case JOB_CLEANUP:
         return getCleanupTaskReports(JobID.downgrade(jobid));
-      case JOB_CLEANUP :
+      case JOB_SETUP :
         return getSetupTaskReports(JobID.downgrade(jobid));
     }
     return new TaskReport[0];

Modified: hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingInputFormat.java?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingInputFormat.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingInputFormat.java Tue Nov 10 17:25:49 2009
@@ -119,17 +119,9 @@
     return splits;
   }
 
-  @SuppressWarnings("unchecked")
+  @Override
   public RecordReader<K, V> createRecordReader(InputSplit split,
       TaskAttemptContext context) throws IOException, InterruptedException {
-
-    // Find the InputFormat and then the RecordReader from the
-    // TaggedInputSplit.
-    TaggedInputSplit taggedInputSplit = (TaggedInputSplit) split;
-    InputFormat<K, V> inputFormat = (InputFormat<K, V>) ReflectionUtils
-      .newInstance(taggedInputSplit.getInputFormatClass(),
-         context.getConfiguration());
-    return inputFormat.createRecordReader(taggedInputSplit.getInputSplit(),
-      context);
+    return new DelegatingRecordReader<K, V>(split, context);
   }
 }

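[For context: MAPREDUCE-1178 moves the reflection lookup deleted above into the new DelegatingRecordReader, which was copied unchanged from trunk and is not shown in this mail. A rough sketch of the delegation pattern, reconstructed from the removed code rather than from the actual trunk file, and assuming package-level access to TaggedInputSplit:

    import java.io.IOException;

    import org.apache.hadoop.mapreduce.InputFormat;
    import org.apache.hadoop.mapreduce.InputSplit;
    import org.apache.hadoop.mapreduce.RecordReader;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.util.ReflectionUtils;

    public class SketchDelegatingRecordReader<K, V> extends RecordReader<K, V> {
      private final RecordReader<K, V> delegate;

      @SuppressWarnings("unchecked")
      public SketchDelegatingRecordReader(InputSplit split,
          TaskAttemptContext context) throws IOException, InterruptedException {
        // Recover the per-path InputFormat recorded in the TaggedInputSplit
        // and build the real reader for the wrapped split.
        TaggedInputSplit tagged = (TaggedInputSplit) split;
        InputFormat<K, V> inputFormat = (InputFormat<K, V>) ReflectionUtils
            .newInstance(tagged.getInputFormatClass(),
                context.getConfiguration());
        delegate = inputFormat.createRecordReader(tagged.getInputSplit(),
            context);
      }

      @Override
      public void initialize(InputSplit split, TaskAttemptContext context)
          throws IOException, InterruptedException {
        delegate.initialize(((TaggedInputSplit) split).getInputSplit(), context);
      }

      @Override
      public boolean nextKeyValue() throws IOException, InterruptedException {
        return delegate.nextKeyValue();
      }

      @Override
      public K getCurrentKey() throws IOException, InterruptedException {
        return delegate.getCurrentKey();
      }

      @Override
      public V getCurrentValue() throws IOException, InterruptedException {
        return delegate.getCurrentValue();
      }

      @Override
      public float getProgress() throws IOException, InterruptedException {
        return delegate.getProgress();
      }

      @Override
      public void close() throws IOException {
        delegate.close();
      }
    }
]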
Modified: hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java Tue Nov 10 17:25:49 2009
@@ -107,14 +107,15 @@
    */
   public void commitJob(JobContext context) throws IOException {
     // delete the _temporary folder and create a _done file in the o/p folder
-    cleanup(context);
+    cleanupJob(context);
     if (shouldMarkOutputDir(context.getConfiguration())) {
       markOutputDirSuccessful(context);
     }
   }
-  
-  // Delete the _temporary folder in the output dir.
-  private void cleanup(JobContext context) throws IOException {
+
+  @Override
+  @Deprecated
+  public void cleanupJob(JobContext context) throws IOException {
     if (outputPath != null) {
       Path tmpDir = new Path(outputPath, FileOutputCommitter.TEMP_DIR_NAME);
       FileSystem fileSys = tmpDir.getFileSystem(context.getConfiguration());
@@ -134,7 +135,7 @@
   public void abortJob(JobContext context, JobStatus.State state) 
   throws IOException {
     // delete the _temporary folder
-    cleanup(context);
+    cleanupJob(context);
   }
   
   /**

Modified: hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java Tue Nov 10 17:25:49 2009
@@ -228,12 +228,19 @@
 
   /** Verify that the given process id is same as its process group id.
    * @param pidStr Process id of the to-be-verified-process
+   * @param procfsDir  Procfs root dir
    */
-  private static boolean assertPidPgrpidForMatch(String pidStr) {
+  static boolean checkPidPgrpidForMatch(String pidStr, String procfsDir) {
     Integer pId = Integer.parseInt(pidStr);
     // Get information for this process
     ProcessInfo pInfo = new ProcessInfo(pId);
-    pInfo = constructProcessInfo(pInfo);
+    pInfo = constructProcessInfo(pInfo, procfsDir);
+    if (pInfo == null) {
+      // process group leader may have finished execution, but we still need to
+      // kill the subProcesses in the process group.
+      return true;
+    }
+
     //make sure that pId and its pgrpId match
     if (!pInfo.getPgrpId().equals(pId)) {
       LOG.warn("Unexpected: Process with PID " + pId +
@@ -258,7 +265,7 @@
                        boolean inBackground)
          throws IOException {
     // Make sure that the pid given is a process group leader
-    if (!assertPidPgrpidForMatch(pgrpId)) {
+    if (!checkPidPgrpidForMatch(pgrpId, PROCFS)) {
       throw new IOException("Process with PID " + pgrpId  +
                           " is not a process group leader.");
     }
@@ -391,15 +398,6 @@
   }
 
   /**
-   * 
-   * Construct the ProcessInfo using the process' PID and procfs and return the
-   * same. Returns null on failing to read from procfs,
-   */
-  private static ProcessInfo constructProcessInfo(ProcessInfo pinfo) {
-    return constructProcessInfo(pinfo, PROCFS);
-  }
-
-  /**
    * Construct the ProcessInfo using the process' PID and procfs rooted at the
    * specified directory and return the same. It is provided mainly to assist
    * testing purposes.
@@ -422,6 +420,8 @@
       in = new BufferedReader(fReader);
     } catch (FileNotFoundException f) {
       // The process vanished in the interim!
+      LOG.warn("The process " + pinfo.getPid()
+          + " may have finished in the interim.");
       return ret;
     }
 
@@ -436,6 +436,11 @@
             .parseInt(m.group(4)), Integer.parseInt(m.group(5)), Long
             .parseLong(m.group(7)));
       }
+      else {
+        LOG.warn("Unexpected: procfs stat file is not in the expected format"
+            + " for process with pid " + pinfo.getPid());
+        ret = null;
+      }
     } catch (IOException io) {
       LOG.warn("Error reading the stream " + io);
       ret = null;

Propchange: hadoop/mapreduce/branches/HDFS-641/src/test/mapred/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/test/mapred:713112
 /hadoop/core/trunk/src/test/mapred:776175-785643
-/hadoop/mapreduce/trunk/src/test/mapred:817878-832891
+/hadoop/mapreduce/trunk/src/test/mapred:817878-834555

Modified: hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapred/TestJobCleanup.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapred/TestJobCleanup.java?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapred/TestJobCleanup.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapred/TestJobCleanup.java Tue Nov 10 17:25:49 2009
@@ -41,6 +41,8 @@
   private static String TEST_ROOT_DIR =
       new File(System.getProperty("test.build.data", "/tmp") + "/" 
                + "test-job-cleanup").toString();
+  private static final String CUSTOM_CLEANUP_FILE_NAME = 
+    "_custom_cleanup";
   private static final String ABORT_KILLED_FILE_NAME = 
     "_custom_abort_killed";
   private static final String ABORT_FAILED_FILE_NAME = 
@@ -86,6 +88,21 @@
   }
   
   /** 
+   * Committer with deprecated {@link FileOutputCommitter#cleanupJob(JobContext)}
+   * making a _failed/_killed in the output folder
+   */
+  static class CommitterWithCustomDeprecatedCleanup extends FileOutputCommitter {
+    @Override
+    public void cleanupJob(JobContext context) throws IOException {
+      System.err.println("---- HERE ----");
+      JobConf conf = context.getJobConf();
+      Path outputPath = FileOutputFormat.getOutputPath(conf);
+      FileSystem fs = outputPath.getFileSystem(conf);
+      fs.create(new Path(outputPath, CUSTOM_CLEANUP_FILE_NAME)).close();
+    }
+  }
+  
+  /** 
    * Committer with abort making a _failed/_killed in the output folder
    */
   static class CommitterWithCustomAbort extends FileOutputCommitter {
@@ -263,4 +280,26 @@
                   new String[] {FileOutputCommitter.SUCCEEDED_FILE_NAME,
                                 ABORT_FAILED_FILE_NAME});
   }
+
+  /**
+   * Test if a failed job with custom committer runs the deprecated
+   * {@link FileOutputCommitter#cleanupJob(JobContext)} code for api 
+   * compatibility testing.
+   */
+  public void testCustomCleanup() throws IOException {
+    // check with a successful job
+    testSuccessfulJob(CUSTOM_CLEANUP_FILE_NAME, 
+                      CommitterWithCustomDeprecatedCleanup.class,
+                      new String[] {});
+    
+    // check with a failed job
+    testFailedJob(CUSTOM_CLEANUP_FILE_NAME, 
+                  CommitterWithCustomDeprecatedCleanup.class, 
+                  new String[] {FileOutputCommitter.SUCCEEDED_FILE_NAME});
+    
+    // check with a killed job
+    testKilledJob(TestJobCleanup.CUSTOM_CLEANUP_FILE_NAME, 
+                  CommitterWithCustomDeprecatedCleanup.class, 
+                  new String[] {FileOutputCommitter.SUCCEEDED_FILE_NAME});
+  }
 }
\ No newline at end of file

Modified: hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMultipleInputs.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMultipleInputs.java?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMultipleInputs.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMultipleInputs.java Tue Nov 10 17:25:49 2009
@@ -17,20 +17,128 @@
  */
 package org.apache.hadoop.mapreduce.lib.input;
 
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.util.Map;
 
-import junit.framework.TestCase;
-
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.HadoopTestCase;
+import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.junit.Before;
+import org.junit.Test;
 
 /**
  * @see TestDelegatingInputFormat
  */
-public class TestMultipleInputs extends TestCase {
+public class TestMultipleInputs extends HadoopTestCase {
+
+  public TestMultipleInputs() throws IOException {
+    super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
+  }
+
+  private static final Path ROOT_DIR = new Path("testing/mo");
+  private static final Path IN1_DIR = new Path(ROOT_DIR, "input1");
+  private static final Path IN2_DIR = new Path(ROOT_DIR, "input2");
+  private static final Path OUT_DIR = new Path(ROOT_DIR, "output");
+
+  private Path getDir(Path dir) {
+    // Hack for local FS that does not have the concept of a 'mounting point'
+    if (isLocalFS()) {
+      String localPathRoot = System.getProperty("test.build.data", "/tmp")
+          .replace(' ', '+');
+      dir = new Path(localPathRoot, dir);
+    }
+    return dir;
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+    Path rootDir = getDir(ROOT_DIR);
+    Path in1Dir = getDir(IN1_DIR);
+    Path in2Dir = getDir(IN2_DIR);
+
+    Configuration conf = createJobConf();
+    FileSystem fs = FileSystem.get(conf);
+    fs.delete(rootDir, true);
+    if (!fs.mkdirs(in1Dir)) {
+      throw new IOException("Mkdirs failed to create " + in1Dir.toString());
+    }
+    if (!fs.mkdirs(in2Dir)) {
+      throw new IOException("Mkdirs failed to create " + in2Dir.toString());
+    }
+  }
+
+  @Test
+  public void testDoMultipleInputs() throws IOException {
+    Path in1Dir = getDir(IN1_DIR);
+    Path in2Dir = getDir(IN2_DIR);
+
+    Path outDir = getDir(OUT_DIR);
+
+    Configuration conf = createJobConf();
+    FileSystem fs = FileSystem.get(conf);
+    fs.delete(outDir, true);
+
+    DataOutputStream file1 = fs.create(new Path(in1Dir, "part-0"));
+    file1.writeBytes("a\nb\nc\nd\ne");
+    file1.close();
+
+    // write tab delimited to second file because we're doing
+    // KeyValueInputFormat
+    DataOutputStream file2 = fs.create(new Path(in2Dir, "part-0"));
+    file2.writeBytes("a\tblah\nb\tblah\nc\tblah\nd\tblah\ne\tblah");
+    file2.close();
+
+    Job job = new Job(conf);
+    job.setJobName("mi");
+
+    MultipleInputs.addInputPath(job, in1Dir, TextInputFormat.class,
+        MapClass.class);
+    MultipleInputs.addInputPath(job, in2Dir, KeyValueTextInputFormat.class,
+        KeyValueMapClass.class);
+
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(Text.class);
+    job.setOutputKeyClass(NullWritable.class);
+    job.setOutputValueClass(Text.class);
+    job.setReducerClass(ReducerClass.class);
+    FileOutputFormat.setOutputPath(job, outDir);
+
+    boolean success = false;
+    try {
+      success = job.waitForCompletion(true);
+    } catch (InterruptedException ie) {
+      throw new RuntimeException(ie);
+    } catch (ClassNotFoundException instante) {
+      throw new RuntimeException(instante);
+    }
+    if (!success)
+      throw new RuntimeException("Job failed!");
+
+    // copy bytes a bunch of times for the ease of readLine() - whatever
+    BufferedReader output = new BufferedReader(new InputStreamReader(fs
+        .open(new Path(outDir, "part-r-00000"))));
+    // reducer should have counted one key from each file
+    assertTrue(output.readLine().equals("a 2"));
+    assertTrue(output.readLine().equals("b 2"));
+    assertTrue(output.readLine().equals("c 2"));
+    assertTrue(output.readLine().equals("d 2"));
+    assertTrue(output.readLine().equals("e 2"));
+  }
+
   @SuppressWarnings("unchecked")
   public void testAddInputPathWithFormat() throws IOException {
     final Job conf = new Job();
@@ -50,7 +158,7 @@
     MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class,
        MapClass.class);
     MultipleInputs.addInputPath(conf, new Path("/bar"),
-       KeyValueTextInputFormat.class, MapClass2.class);
+        KeyValueTextInputFormat.class, KeyValueMapClass.class);
     final Map<Path, InputFormat> inputs = MultipleInputs
        .getInputFormatMap(conf);
     final Map<Path, Class<? extends Mapper>> maps = MultipleInputs
@@ -60,12 +168,42 @@
     assertEquals(KeyValueTextInputFormat.class, inputs.get(new Path("/bar"))
        .getClass());
     assertEquals(MapClass.class, maps.get(new Path("/foo")));
-    assertEquals(MapClass2.class, maps.get(new Path("/bar")));
+    assertEquals(KeyValueMapClass.class, maps.get(new Path("/bar")));
   }
 
-  static class MapClass extends Mapper<String, String, String, String> {
+  static final Text blah = new Text("blah");
+
+  // these 3 classes do a reduce side join with 2 different mappers
+  static class MapClass extends Mapper<LongWritable, Text, Text, Text> {
+    // receives "a", "b", "c" as values
+    @Override
+    public void map(LongWritable key, Text value, Context ctx)
+        throws IOException, InterruptedException {
+      ctx.write(value, blah);
+    }
   }
 
-  static class MapClass2 extends MapClass {
+  static class KeyValueMapClass extends Mapper<Text, Text, Text, Text> {
+    // receives "a", "b", "c" as keys
+    @Override
+    public void map(Text key, Text value, Context ctx) throws IOException,
+        InterruptedException {
+      ctx.write(key, blah);
+    }
   }
+
+  static class ReducerClass extends Reducer<Text, Text, NullWritable, Text> {
+    // should receive 2 rows for each key
+    int count = 0;
+
+    @Override
+    public void reduce(Text key, Iterable<Text> values, Context ctx)
+        throws IOException, InterruptedException {
+      count = 0;
+      for (Text value : values)
+        count++;
+      ctx.write(NullWritable.get(), new Text(key.toString() + " " + count));
+    }
+  }
+
 }

Modified: hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapreduce/util/TestProcfsBasedProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapreduce/util/TestProcfsBasedProcessTree.java?rev=834556&r1=834555&r2=834556&view=diff
==============================================================================
--- hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapreduce/util/TestProcfsBasedProcessTree.java (original)
+++ hadoop/mapreduce/branches/HDFS-641/src/test/mapred/org/apache/hadoop/mapreduce/util/TestProcfsBasedProcessTree.java Tue Nov 10 17:25:49 2009
@@ -423,6 +423,34 @@
   }
 
   /**
+   * Verifies ProcfsBasedProcessTree.checkPidPgrpidForMatch() in case of
+   * 'constructProcessInfo() returning null' by not writing stat file for the
+   * mock process
+   * @throws IOException if there was a problem setting up the
+   *                      fake procfs directories or files.
+   */
+  public void testDestroyProcessTree() throws IOException {
+    // test process
+    String pid = "100";
+    // create the fake procfs root directory. 
+    File procfsRootDir = new File(TEST_ROOT_DIR, "proc");
+
+    try {
+      setupProcfsRootDir(procfsRootDir);
+      
+      // crank up the process tree class.
+      ProcfsBasedProcessTree processTree = new ProcfsBasedProcessTree(
+                        pid, true, 100L, procfsRootDir.getAbsolutePath());
+
+      // Let us not create stat file for pid 100.
+      assertTrue(ProcfsBasedProcessTree.checkPidPgrpidForMatch(
+                            pid, procfsRootDir.getAbsolutePath()));
+    } finally {
+      FileUtil.fullyDelete(procfsRootDir);
+    }
+  }
+  
+  /**
    * Test the correctness of process-tree dump.
    * 
    * @throws IOException

Propchange: hadoop/mapreduce/branches/HDFS-641/src/webapps/job/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Nov 10 17:25:49 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/webapps/job:713112
 /hadoop/core/trunk/src/webapps/job:776175-785643
-/hadoop/mapreduce/trunk/src/webapps/job:817878-832891
+/hadoop/mapreduce/trunk/src/webapps/job:817878-834555


