hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ste...@apache.org
Subject svn commit: r885145 [2/34] - in /hadoop/mapreduce/branches/MAPREDUCE-233: ./ .eclipse.templates/ .eclipse.templates/.launches/ conf/ ivy/ lib/ src/benchmarks/gridmix/ src/benchmarks/gridmix/pipesort/ src/benchmarks/gridmix2/ src/benchmarks/gridmix2/src...
Date Sat, 28 Nov 2009 20:26:22 GMT

Propchange: hadoop/mapreduce/branches/MAPREDUCE-233/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Sat Nov 28 20:26:01 2009
@@ -1,7 +1,9 @@
 build
+build.properties
 ivy/ivy-*.jar
 logs
 .classpath
 .externalToolBuilders
+.launches
 .project
 .settings

Propchange: hadoop/mapreduce/branches/MAPREDUCE-233/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Nov 28 20:26:01 2009
@@ -1,2 +1,2 @@
 /hadoop/core/branches/branch-0.19/mapred:713112
-/hadoop/mapreduce/trunk:804974-807678
+/hadoop/mapreduce/trunk:804974-884916

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/.eclipse.templates/.classpath
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/.eclipse.templates/.classpath?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/.eclipse.templates/.classpath (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/.eclipse.templates/.classpath Sat Nov 28 20:26:01 2009
@@ -25,10 +25,11 @@
 	<classpathentry kind="src" path="src/contrib/vaidya/src/java"/>
 	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
 	<classpathentry kind="var" path="ANT_HOME/lib/ant.jar"/>
-	<classpathentry kind="lib" path="lib/hadoop-core-0.21.0-dev.jar"/>
-	<classpathentry kind="lib" path="lib/hadoop-core-test-0.21.0-dev.jar"/>
-	<classpathentry kind="lib" path="lib/hadoop-hdfs-0.21.0-dev.jar"/>
-	<classpathentry kind="lib" path="lib/hadoop-hdfs-test-0.21.0-dev.jar"/>
+	<classpathentry kind="lib" path="lib/hadoop-core-0.22.0-dev.jar"/>
+	<classpathentry kind="lib" path="lib/hadoop-core-test-0.22.0-dev.jar"/>
+	<classpathentry kind="lib" path="lib/hadoop-hdfs-0.22.0-dev.jar"/>
+	<classpathentry kind="lib" path="lib/hadoop-hdfs-test-0.22.0-dev.jar"/>
+	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/avro-1.2.0.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/commons-cli-1.2.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/commons-codec-1.3.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/commons-el-1.0.jar"/>
@@ -38,6 +39,8 @@
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/commons-net-1.4.1.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/core-3.1.1.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/hsqldb-1.8.0.10.jar"/>
+	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/jackson-core-asl-1.0.1.jar"/>
+	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/jackson-mapper-asl-1.0.1.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/jasper-compiler-5.5.12.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/jasper-runtime-5.5.12.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/jets3t-0.6.1.jar"/>
@@ -49,13 +52,15 @@
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/kfs-0.3.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/log4j-1.2.15.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/oro-2.0.8.jar"/>
+	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/paranamer-1.5.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/servlet-api-2.5-6.1.14.jar"/>
-	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/slf4j-api-1.4.3.jar"/>
-	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/slf4j-log4j12-1.4.3.jar"/>
+	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/slf4j-api-1.5.8.jar"/>
+	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/slf4j-simple-1.5.8.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/xmlenc-0.52.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/index/common/lucene-core-2.3.1.jar"/>
 	<classpathentry kind="lib" path="build/test/classes"/>
 	<classpathentry kind="lib" path="build/classes"/>
 	<classpathentry kind="lib" path="conf"/>
+	<classpathentry kind="lib" path="build/ivy/lib/sqoop/common/commons-io-1.4.jar"/>
 	<classpathentry kind="output" path="build/eclipse-classes"/>
 </classpath>

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/.gitignore
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/.gitignore?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/.gitignore (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/.gitignore Sat Nov 28 20:26:01 2009
@@ -16,9 +16,11 @@
 *~
 .classpath
 .project
+.launches
 .settings
 .svn
 build/
+build.properties
 conf/masters
 conf/slaves
 conf/core-site.xml

Propchange: hadoop/mapreduce/branches/MAPREDUCE-233/.gitignore
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Nov 28 20:26:01 2009
@@ -1,4 +1,4 @@
 /hadoop/core/branches/HADOOP-4687/mapred/.gitignore:776175-784965
 /hadoop/core/branches/branch-0.19/mapred/.gitignore:713112
 /hadoop/core/trunk/.gitignore:784664-785643
-/hadoop/mapreduce/trunk/.gitignore:804974-807678
+/hadoop/mapreduce/trunk/.gitignore:804974-884916

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/CHANGES.txt?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/CHANGES.txt (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/CHANGES.txt Sat Nov 28 20:26:01 2009
@@ -4,6 +4,100 @@
 
   INCOMPATIBLE CHANGES
 
+  NEW FEATURES
+
+    MAPREDUCE-1017. Compression and output splitting for Sqoop.
+    (Aaron Kimball via tomwhite)
+
+    MAPREDUCE-1026. Does mutual authentication of the shuffle
+    transfers using a shared JobTracker generated key.
+    (Boris Shkolnik via ddas)
+
+  IMPROVEMENTS
+
+    MAPREDUCE-1198. Alternatively schedule different types of tasks in
+    fair share scheduler. (Scott Chen via matei)
+
+    MAPREDUCE-707. Provide a jobconf property for explicitly assigning a job to 
+    a pool in the Fair Scheduler. (Alan Heirich via matei)
+
+    MAPREDUCE-999. Improve Sqoop test speed and refactor tests.
+    (Aaron Kimball via tomwhite)
+
+    MAPREDUCE-906. Update Sqoop documentation. (Aaron Kimball via cdouglas)
+
+    MAPREDUCE-947. Added commitJob and abortJob apis to OutputCommitter.
+    Enhanced FileOutputCommitter to create a _SUCCESS file for successful
+    jobs. (Amar Kamat & Jothi Padmanabhan via acmurthy) 
+
+    MAPREDUCE-1103. Added more metrics to Jobtracker. (sharad) 
+
+    MAPREDUCE-1048. Add occupied/reserved slot usage summary on jobtracker UI.
+    (Amareshwari Sriramadasu and Hemanth Yamijala via sharad)
+
+    MAPREDUCE-1090. Modified log statement in TaskMemoryManagerThread to
+    include task attempt id. (yhemanth)
+
+    MAPREDUCE-1069. Implement Sqoop API refactoring. (Aaron Kimball via
+    tomwhite)
+
+    MAPREDUCE-1036. Document Sqoop API. (Aaron Kimball via cdouglas)
+
+    MAPREDUCE-1189. Reduce ivy console output to observable level (cos)
+
+    MAPREDUCE-1167. ProcfsBasedProcessTree collects rss memory information.
+    (Scott Chen via dhruba)
+
+    MAPREDUCE-1169. Improvements to mysqldump use in Sqoop.
+    (Aaron Kimball via tomwhite)
+
+  OPTIMIZATIONS
+
+    MAPREDUCE-270. Fix the tasktracker to optionally send an out-of-band
+    heartbeat on task-completion for better job-latency. (acmurthy) 
+    Configuration changes:
+      add mapreduce.tasktracker.outofband.heartbeat 
+
+  BUG FIXES
+
+    MAPREDUCE-1089. Fix NPE in fair scheduler preemption when tasks are
+    scheduled but not running. (Todd Lipcon via matei)
+
+    MAPREDUCE-1014. Fix the libraries for common and hdfs. (omalley)
+
+    MAPREDUCE-1111. JT Jetty UI not working if we run mumak.sh 
+    off packaged distribution directory. (hong tang via mahadev)
+
+    MAPREDUCE-1133. Eclipse .classpath template has outdated jar files and is
+    missing some new ones. (cos)
+
+    MAPREDUCE-1098. Fixed the distributed-cache to not do i/o while holding a
+    global lock. (Amareshwari Sriramadasu via acmurthy)
+
+    MAPREDUCE-1158. Fix JT running maps and running reduces metrics.
+    (sharad)
+
+    MAPREDUCE-1160. Reduce verbosity of log lines in some Map/Reduce classes
+    to avoid filling up jobtracker logs on a busy cluster.
+    (Ravi Gummadi and Hong Tang via yhemanth)
+
+    MAPREDUCE-1153. Fix tasktracker metrics when trackers are decommissioned.
+    (sharad)
+
+    MAPREDUCE-1128. Fix MRUnit to prohibit iterating over values twice. (Aaron
+    Kimball via cdouglas)
+
+    MAPREDUCE-665. Move libhdfs to HDFS subproject. (Eli Collins via dhruba)
+
+    MAPREDUCE-1196. Fix FileOutputCommitter to use the deprecated cleanupJob
+    api correctly. (acmurthy)
+   
+    MAPREDUCE-1244. Fix eclipse-plugin's build dependencies. (gkesavan)
+
+Release 0.21.0 - Unreleased
+
+  INCOMPATIBLE CHANGES
+
     MAPREDUCE-516. Fix the starvation problem in the Capacity Scheduler 
     when running High RAM Jobs. (Arun Murthy via yhemanth)
 
@@ -30,6 +124,18 @@
     MAPREDUCE-479. Provide full task id to map output servlet rather than the
     reduce id, only. (Jiaqi Tan via cdouglas)
 
+    MAPREDUCE-873. Simplify job recovery. Incomplete jobs are resubmitted on 
+    jobtracker restart. Removes a public constructor in JobInProgress. (sharad)
+
+    HADOOP-6230. Moved process tree and memory calculator related classes from
+    Common to Map/Reduce. (Vinod Kumar Vavilapalli via yhemanth)
+
+    MAPREDUCE-157. Refactor job history APIs and change the history format to 
+    JSON. (Jothi Padmanabhan via sharad)
+
+    MAPREDUCE-849. Rename configuration properties. (Amareshwari Sriramadasu 
+    via sharad)
+
   NEW FEATURES
 
     MAPREDUCE-706. Support for FIFO pools in the fair scheduler.
@@ -78,6 +184,42 @@
     MAPREDUCE-768. Provide an option to dump jobtracker configuration in JSON
     format to standard output. (V.V.Chaitanya Krishna via yhemanth)
 
+    MAPREDUCE-824. Add support for a hierarchy of queues in the capacity 
+    scheduler. (Rahul Kumar Singh via yhemanth)
+
+    MAPREDUCE-751. Add Rumen, a tool for extracting statistics from job tracker
+    logs and generating job traces for simulation and analysis. (Dick King via
+    cdouglas)
+
+    MAPREDUCE-938. Postgresql support for Sqoop. (Aaron Kimball via tomwhite)	
+
+    MAPREDUCE-830. Add support for splittable compression to TextInputFormats.
+    (Abdul Qadeer via cdouglas)
+
+    MAPREDUCE-861. Add support for hierarchical queues in the Map/Reduce
+    framework. (Rahul Kumar Singh via yhemanth)
+
+    MAPREDUCE-776. Add Gridmix, a benchmark processing Rumen traces to simulate
+    a measured mix of jobs on a cluster. (cdouglas)
+
+    MAPREDUCE-862. Enhance JobTracker UI to display hierarchical queues.
+    (V.V.Chaitanya Krishna via yhemanth)
+
+    MAPREDUCE-777. Brand new apis to track and query jobs as a
+    replacement for JobClient. (Amareshwari Sriramadasu via acmurthy)
+
+    MAPREDUCE-775. Add native and streaming support for Vertica as an input
+    or output format taking advantage of parallel read and write properties of 
+    the DBMS. (Omer Trajman via ddas)
+
+    MAPREDUCE-679. XML-based metrics as JSP servlet for JobTracker.
+    (Aaron Kimball via tomwhite)
+
+    MAPREDUCE-980. Modify JobHistory to use Avro for serialization. (cutting)
+
+    MAPREDUCE-728. Add Mumak, a Hadoop map/reduce simulator. (Arun C Murthy,
+    Tamas Sarlos, Anirban Dasgupta, Guanying Wang, and Hong Tang via cdouglas)
+
   IMPROVEMENTS
 
     MAPREDUCE-816. Rename "local" mysql import to "direct" in Sqoop.
@@ -206,9 +348,6 @@
     MAPREDUCE-779. Added node health failure counts into 
     JobTrackerStatistics. (Sreekanth Ramakrishnan via yhemanth)
 
-    MAPREDUCE-372. Moves ChainMapper/Reducer to the new API.
-    (Amareshwari Sriramadasu via ddas)
-
     MAPREDUCE-789. Oracle support for Sqoop. (Aaron Kimball via tomwhite)
 
     MAPREDUCE-842. Setup secure permissions for localized job files,
@@ -259,6 +398,135 @@
     MAPREDUCE-476. Extend DistributedCache to work locally (LocalJobRunner).
     (Philip Zeyliger via tomwhite)
 
+    MAPREDUCE-750. Extensible ConnManager factory API. (Aaron Kimball via
+    tomwhite)
+
+    MAPREDUCE-825. JobClient completion poll interval of 5s causes slow tests
+    in local mode. (Aaron Kimball via tomwhite)
+
+    MAPREDUCE-910. Support counters in MRUnit. (Aaron Kimball via cdouglas)
+
+    MAPREDUCE-788. Update gridmix2 to use the new API (Amareshwari Sriramadasu
+    via cdouglas)
+
+    MAPREDUCE-875. Make DBRecordReader execute queries lazily. (Aaron Kimball 
+    via enis)
+
+    MAPREDUCE-318. Modularizes the shuffle code. (Jothi Padmanabhan and 
+    Arun Murthy via ddas)
+
+    MAPREDUCE-936. Allow a load difference for fairshare scheduler.
+    (Zheng Shao via dhruba)
+
+    MAPREDUCE-370. Update MultipleOutputs to use the API, merge functionality
+    of MultipleOutputFormat. (Amareshwari Sriramadasu via cdouglas)
+
+    MAPREDUCE-898. Changes DistributedCache to use the new API.
+    (Amareshwari Sriramadasu via ddas)
+
+    MAPREDUCE-876. Sqoop import of large tables can time out.
+    (Aaron Kimball via tomwhite)
+
+    MAPREDUCE-918. Test hsqldb server should be memory-only.
+    (Aaron Kimball via tomwhite)
+
+    MAPREDUCE-144. Includes dump of the process tree in task diagnostics when 
+    a task is killed due to exceeding memory limits.
+    (Vinod Kumar Vavilapalli via yhemanth)
+
+    MAPREDUCE-945. Modifies MRBench and TestMapRed to use ToolRunner so that
+    options such as queue name can be passed via command line.
+    (Sreekanth Ramakrishnan via yhemanth)
+
+    MAPREDUCE-963. Deprecate o.a.h.mapred.FileAlreadyExistsException and
+    replace it with o.a.h.fs.FileAlreadyExistsException.  (Boris Shkolnik
+    via szetszwo)
+
+    MAPREDUCE-960. Remove an unnecessary intermediate copy and obsolete API
+    from KeyValueLineRecordReader. (cdouglas)
+
+    MAPREDUCE-930. Modify Rumen to resolve paths in the canonical way, rather
+    than defaulting to the local filesystem. (cdouglas)
+
+    MAPREDUCE-944. Extend the LoadManager API of the fair-share scheduler
+    to support regulating tasks for a job based on resources currently in use
+    by that job. (dhruba)
+
+    MAPREDUCE-973. Move FailJob and SleepJob from examples to test. (cdouglas 
+    via omalley)
+
+    MAPREDUCE-966. Modify Rumen to clean up interfaces and simplify integration
+    with other tools. (Hong Tang via cdouglas)
+
+    MAPREDUCE-856. Setup secure permissions for distributed cache files.
+    (Vinod Kumar Vavilapalli via yhemanth)
+
+    MAPREDUCE-885. More efficient SQL queries for DBInputFormat. (Aaron Kimball 
+    via enis)
+
+    MAPREDUCE-284. Enables ipc.client.tcpnodelay in Tasktracker's Child.
+    (Ravi Gummadi via sharad)
+
+    MAPREDUCE-907. Sqoop should use more intelligent splits. (Aaron Kimball
+    via tomwhite)
+
+    MAPREDUCE-916. Split the documentation to match the project split.
+    (Corinne Chandel via omalley)
+
+    MAPREDUCE-649. Validate a copy by comparing the source and destination
+    checksums in distcp. Also adds an intra-task retry mechanism for errors
+    detected during the copy. (Ravi Gummadi via cdouglas)
+
+    MAPREDUCE-654. Add a -dryrun option to distcp printing a summary of the
+    file data to be copied, without actually performing the copy. (Ravi Gummadi
+    via cdouglas)
+
+    MAPREDUCE-664. Display the number of files deleted by distcp when the
+    -delete option is specified. (Ravi Gummadi via cdouglas)
+
+    MAPREDUCE-781. Let the name of distcp jobs be configurable. (Venkatesh S
+    via cdouglas)
+
+    MAPREDUCE-975. Add an API in job client to get the history file url for 
+    a given job id. (sharad)
+
+    MAPREDUCE-905. Add Eclipse launch tasks for MapReduce. (Philip Zeyliger
+    via tomwhite)
+
+    MAPREDUCE-277. Makes job history counters available on the job history
+    viewers. (Jothi Padmanabhan via ddas)
+
+    MAPREDUCE-893. Provides an ability to refresh queue configuration
+    without restarting the JobTracker.
+    (Vinod Kumar Vavilapalli and Rahul Kumar Singh via yhemanth)
+
+    MAPREDUCE-1011. Add build.properties to svn and git ignore. (omalley)
+
+    MAPREDUCE-954. Change Map-Reduce context objects to be interfaces.
+    (acmurthy) 
+
+    MAPREDUCE-639. Change Terasort example to reflect the 2009 updates. 
+    (omalley)
+
+    MAPREDUCE-1063. Document gridmix benchmark. (cdouglas)
+
+    MAPREDUCE-931. Use built-in interpolation classes for making up task
+    runtimes in Rumen. (Dick King via cdouglas)
+
+    MAPREDUCE-1012. Mark Context interfaces as public evolving. (Tom White via
+    cdouglas)
+
+    MAPREDUCE-971. Document use of distcp when copying to s3, managing timeouts
+    in particular. (Aaron Kimball via cdouglas)
+
+    HDFS-663. DFSIO for append. (shv)
+
+    HDFS-641. Move all of the components that depend on map/reduce to 
+    map/reduce. (omalley)
+
+    HADOOP-5107. Use Maven ant tasks to publish artifacts. (Giridharan Kesavan
+    via omalley)
+
   BUG FIXES
 
     MAPREDUCE-878. Rename fair scheduler design doc to 
@@ -473,3 +741,186 @@
     mapred.system.dir in the JobTracker. The JobTracker will bail out if it
     encounters such an exception. (Amar Kamat via ddas)
 
+    MAPREDUCE-430. Fix a bug related to task getting stuck in case of 
+    OOM error. (Amar Kamat via ddas)
+
+    MAPREDUCE-871. Fix ownership of Job/Task local files to have correct 
+    group ownership according to the egid of the tasktracker.
+    (Vinod Kumar Vavilapalli via yhemanth) 
+
+    MAPREDUCE-911. Fix a bug in TestTaskFail related to speculative 
+    execution. (Amareshwari Sriramadasu via sharad)
+
+    MAPREDUCE-687. Fix an assertion in TestMiniMRMapRedDebugScript.
+    (Amareshwari Sriramadasu via sharad)
+
+    MAPREDUCE-924. Fixes the TestPipes testcase to use Tool.
+    (Amareshwari Sriramadasu via sharad)
+
+    MAPREDUCE-903. Add Avro jar to eclipse classpath.
+    (Philip Zeyliger via tomwhite)
+
+    MAPREDUCE-943. Removes a testcase in TestNodeRefresh that doesn't make 
+    sense in the new Job recovery model. (Amar Kamat via ddas)
+
+    MAPREDUCE-764. TypedBytesInput's readRaw() does not preserve custom type
+    codes. (Klaas Bosteels via tomwhite)
+
+    HADOOP-6243. Fixes a NullPointerException in handling deprecated keys.
+    (Sreekanth Ramakrishnan via yhemanth)
+
+    MAPREDUCE-968. NPE in distcp encountered when placing _logs directory on
+    S3FileSystem. (Aaron Kimball via tomwhite)
+ 
+    MAPREDUCE-826. harchive doesn't use ToolRunner / harchive returns 0 even
+    if the job fails with exception (koji Noguchi via mahadev)
+
+    MAPREDUCE-839. unit test TestMiniMRChildTask fails on mac os-x (hong tang
+    via mahadev)
+
+    MAPREDUCE-112. Add counters for reduce input, output records to the new API.
+    (Jothi Padmanabhan via cdouglas)
+
+    MAPREDUCE-648. Fix two distcp bugs: (1) it should not launch a job if all
+    src paths are directories, and (2) it does not skip copying when updating
+    a single file.  (Ravi Gummadi via szetszwo)
+
+    MAPREDUCE-946. Fix a regression in LineRecordReader where the
+    maxBytesToConsume parameter is not set correctly. (cdouglas)
+
+    MAPREDUCE-977. Missing jackson jars from Eclipse template. (tomwhite)
+
+    MAPREDUCE-988. Fix a packaging issue in the contrib modules. (Hong Tang via
+    cdouglas)
+
+    MAPREDUCE-971. distcp does not always remove distcp.tmp.dir. (Aaron Kimball
+    via tomwhite)
+
+    MAPREDUCE-995. Fix a bug in JobHistory where tasks completing after the job
+    is closed cause a NPE. (Jothi Padmanabhan via cdouglas)
+
+    MAPREDUCE-953. Fix QueueManager to dump queue configuration in JSON format.
+    (V.V. Chaitanya Krishna via yhemanth)
+
+    MAPREDUCE-645. Prevent distcp from running a job when the destination is a
+    file, but the source is not. (Ravi Gummadi via cdouglas)
+
+    MAPREDUCE-1002. Flushed writer in JobQueueClient so queue information is
+    printed correctly. (V.V. Chaitanya Krishna via yhemanth)
+
+    MAPREDUCE-1003. Fix compilation problem in eclipse plugin when
+    eclipse.home is set. (Ravi Gummadi via yhemanth)
+
+    MAPREDUCE-941. Vaidya script fails on Solaris. (Chad Metcalf
+    via tomwhite)
+
+    MAPREDUCE-923. Sqoop classpath breaks for jar files with a
+    plus sign in their names. (Aaron Kimball via tomwhite)
+
+    MAPREDUCE-912. Add and standardize Apache license headers. (Chad Metcalf
+    via cdouglas)
+
+    MAPREDUCE-1022. Fix compilation of vertica testcases. (Vinod Kumar 
+    Vavilapalli via acmurthy)
+
+    MAPREDUCE-1000. Handle corrupt history files in JobHistory.initDone().
+    (Jothi Padmanabhan via sharad)
+
+    MAPREDUCE-1028. Fixed number of slots occupied by cleanup tasks to one
+    irrespective of slot size for the job.
+    (Ravi Gummadi via yhemanth)
+
+    MAPREDUCE-964. Fixed start and finish times of TaskStatus to be
+    consistent, thereby fixing inconsistencies in metering tasks.
+    (Sreekanth Ramakrishnan via yhemanth)
+
+    MAPREDUCE-1076. Deprecate ClusterStatus and add javadoc in ClusterMetrics.
+    (Amareshwari Sriramadasu via sharad)
+
+    MAPREDUCE-979. Fixed JobConf APIs related to memory parameters to return
+    values of new configuration variables when deprecated variables are
+    disabled. (Sreekanth Ramakrishnan via yhemanth)
+   
+    MAPREDUCE-1030. Modified scheduling algorithm to return a map and reduce
+    task per heartbeat in the capacity scheduler.
+    (Rahul Kumar Singh via yhemanth)
+
+    MAPREDUCE-1071. Use DataInputStream rather than FSDataInputStream in the
+    JobHistory EventReader. (Hong Tang via cdouglas)
+
+    MAPREDUCE-986. Fix Rumen to work with truncated task lines. (Dick King via
+    cdouglas)
+
+    MAPREDUCE-1029. Fix failing TestCopyFiles by restoring the unzipping of
+    HDFS webapps from the hdfs jar. (Aaron Kimball and Jothi Padmanabhan via
+    cdouglas)
+
+    MAPREDUCE-769. Make findbugs and javac warnings to zero.
+    (Amareshwari Sriramadasu via sharad)
+
+    MAPREDUCE-1104. Initialize RecoveryManager in JobTracker cstr called by
+    Mumak. (Hong Tang via cdouglas)
+
+    MAPREDUCE-1061. Add unit test validating byte specifications for gridmix
+    jobs. (cdouglas)
+
+    MAPREDUCE-1077. Fix Rumen so that truncated tasks do not mark the job as
+    successful. (Dick King via cdouglas)
+
+    MAPREDUCE-1041. Make TaskInProgress::taskStatuses map package-private.
+    (Jothi Padmanabhan via cdouglas)
+
+    MAPREDUCE-1070. Prevent a deadlock in the fair scheduler servlet.
+    (Todd Lipcon via cdouglas)
+
+    MAPREDUCE-1086. Setup Hadoop logging environment for tasks to point to
+    task related parameters. (Ravi Gummadi via yhemanth)
+
+    MAPREDUCE-1105. Remove max limit configuration in capacity scheduler in
+    favor of max capacity percentage thus allowing the limit to go over
+    queue capacity. (Rahul Kumar Singh via yhemanth)
+
+    MAPREDUCE-1016.  Make the job history log format JSON.  (cutting)
+
+    MAPREDUCE-1038. Weave Mumak aspects only if related files have changed.
+    (Aaron Kimball via cdouglas)
+
+    MAPREDUCE-1037. Continue running contrib tests if Sqoop tests fail. (Aaron
+    Kimball via cdouglas)
+
+    MAPREDUCE-1163. Remove unused, hard-coded paths from libhdfs. (Allen
+    Wittenauer via cdouglas)
+
+    MAPREDUCE-962. Fix a NullPointerException while killing task process 
+    trees. (Ravi Gummadi via yhemanth)
+
+    MAPREDUCE-1177. Correct setup/cleanup inversion in
+    JobTracker::getTaskReports. (Vinod Kumar Vavilapalli via cdouglas)
+
+    MAPREDUCE-1178. Fix ClassCastException in MultipleInputs by adding 
+    a DelegatingRecordReader. (Amareshwari Sriramadasu and Jay Booth 
+    via sharad)
+
+    MAPREDUCE-1068. Fix streaming job to show proper message if file 
+    is not present. (Amareshwari Sriramadasu via sharad)
+
+    MAPREDUCE-1147. Add map output counters to new API. (Amar Kamat via
+    cdouglas)
+
+    MAPREDUCE-915. The debug scripts are run as the job user. (ddas)
+
+    MAPREDUCE-1007. Fix NPE in CapacityTaskScheduler.getJobs(). 
+    (V.V.Chaitanya Krishna via sharad)
+
+    MAPREDUCE-28. Refactor TestQueueManager and fix default ACLs.
+    (V.V.Chaitanya Krishna and Rahul K Singh via sharad)
+
+    MAPREDUCE-1182. Fix overflow in reduce causing allocations to exceed the
+    configured threshold. (cdouglas)
+
+    MAPREDUCE-1239. Fix contrib components build dependencies. 
+    (Giridharan Kesavan and omalley) 
+
+    MAPREDUCE-787. Fix JobSubmitter to honor user given symlink path.
+    (Amareshwari Sriramadasu via sharad)
+

Propchange: hadoop/mapreduce/branches/MAPREDUCE-233/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Sat Nov 28 20:26:01 2009
@@ -0,0 +1,3 @@
+/hadoop/core/branches/branch-0.19/mapred/CHANGES.txt:713112
+/hadoop/mapreduce/branches/HDFS-641/CHANGES.txt:817878-835964
+/hadoop/mapreduce/trunk/CHANGES.txt:804974-884916

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/build.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/build.xml?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/build.xml (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/build.xml Sat Nov 28 20:26:01 2009
@@ -18,6 +18,7 @@
 -->
 
 <project name="hadoop-mapred" default="compile" 
+   xmlns:artifact="urn:maven-artifact-ant"
    xmlns:ivy="antlib:org.apache.ivy.ant"> 
 
   <!-- Load all the default properties, and any the user wants    -->
@@ -27,10 +28,7 @@
  
   <property name="Name" value="Hadoop-Mapred"/>
   <property name="name" value="hadoop-mapred"/>
-  <property name="version" value="0.21.0-dev"/>
-  <property name="hadoop-core.version" value="0.21.0-dev"/>
-  <property name="hadoop-mr.version" value="0.21.0-dev"/>
-  <property name="hadoop-hdfs.version" value="0.21.0-dev"/>
+  <property name="version" value="0.22.0-SNAPSHOT"/>
   <property name="final.name" value="${name}-${version}"/>
   <property name="test.final.name" value="${name}-test-${version}"/>
   <property name="examples.final.name" value="${name}-examples-${version}"/>
@@ -44,13 +42,11 @@
   <property name="conf.dir" value="${basedir}/conf"/>
   <property name="contrib.dir" value="${basedir}/src/contrib"/>
   <property name="docs.src" value="${basedir}/src/docs"/>
-  <property name="src.docs.cn" value="${basedir}/src/docs/cn"/>
   <property name="changes.src" value="${docs.src}/changes"/>
   <property name="c++.src" value="${basedir}/src/c++"/>
   <property name="c++.utils.src" value="${c++.src}/utils"/>
   <property name="c++.pipes.src" value="${c++.src}/pipes"/>
   <property name="c++.examples.pipes.src" value="${examples.dir}/pipes"/>
-  <property name="c++.libhdfs.src" value="${c++.src}/libhdfs"/>
   <property name="librecordio.src" value="${c++.src}/librecordio"/>
   <property name="tools.src" value="${basedir}/src/tools"/>
 
@@ -75,11 +71,9 @@
   <property name="build.c++" value="${build.dir}/c++-build/${build.platform}"/>
   <property name="build.c++.utils" value="${build.c++}/utils"/>
   <property name="build.c++.pipes" value="${build.c++}/pipes"/>
-  <property name="build.c++.libhdfs" value="${build.c++}/libhdfs"/>
   <property name="build.c++.examples.pipes" 
             value="${build.c++}/examples/pipes"/>
   <property name="build.docs" value="${build.dir}/docs"/>
-  <property name="build.docs.cn" value="${build.dir}/docs/cn"/>
   <property name="build.javadoc" value="${build.docs}/api"/>
   <property name="build.javadoc.timestamp" value="${build.javadoc}/index.html" />
   <property name="build.javadoc.dev" value="${build.docs}/dev-api"/>
@@ -88,6 +82,7 @@
   <property name="install.c++.examples" 
             value="${build.dir}/c++-examples/${build.platform}"/>
 
+  <property environment="env"/>
   <property name="test.src.dir" value="${basedir}/src/test"/>
   <property name="test.lib.dir" value="${basedir}/src/test/lib"/>
   <property name="test.build.dir" value="${build.dir}/test"/>
@@ -111,14 +106,15 @@
   <property name="test.junit.printsummary" value="yes" />
   <property name="test.junit.haltonfailure" value="no" />
   <property name="test.junit.maxmemory" value="512m" />
+  <property name="test.tmp.dir" value="${env.TMP}" />
+  <property name="test.temp.dir" value="${env.TEMP}" />
+
+  <property name="test.tools.input.dir" value="${basedir}/src/test/tools/data" />
 
   <property name="test.mapred.build.classes" value="${test.build.dir}/mapred/classes"/>
   <property name="test.mapred.commit.tests.file" value="${test.src.dir}/commit-tests" />
   <property name="test.mapred.all.tests.file" value="${test.src.dir}/all-tests" />
 
-  <property name="test.libhdfs.conf.dir" value="${c++.libhdfs.src}/tests/conf"/>
-  <property name="test.libhdfs.dir" value="${test.build.dir}/libhdfs"/>
-
   <property name="librecordio.test.dir" value="${test.build.dir}/librecordio"/>
   <property name="web.src.dir" value="${basedir}/src/web"/>
   <property name="src.webapps" value="${basedir}/src/webapps"/>
@@ -144,7 +140,7 @@
 
   <property name="jdiff.build.dir" value="${build.docs}/jdiff"/>
   <property name="jdiff.xml.dir" value="${lib.dir}/jdiff"/>
-  <property name="jdiff.stable" value="0.20.0"/>
+  <property name="jdiff.stable" value="0.21.0"/>
   <property name="jdiff.stable.javadoc" 
             value="http://hadoop.apache.org/core/docs/r${jdiff.stable}/api/"/>
 
@@ -173,8 +169,13 @@
   <!-- IVY properteis set here -->
   <property name="ivy.dir" location="ivy" />
   <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
+  <property name="mvn.repo" value="http://repo2.maven.org/maven2"/>
   <property name="ivy.jar" location="${ivy.dir}/ivy-${ivy.version}.jar"/>
-  <property name="ivy_repo_url" value="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/>
+  <property name="repo" value="snapshots"/>
+  <property name="asfrepo" value="https://repository.apache.org/content/repositories/${repo}"/>
+  <property name="ant_task.jar" location="${ivy.dir}/maven-ant-tasks-${ant-task.version}.jar"/>
+  <property name="ant_task_repo_url" value="${mvn.repo}/org/apache/maven/maven-ant-tasks/${ant-task.version}/maven-ant-tasks-${ant-task.version}.jar"/>
+  <property name="ivy_repo_url" value="${mvn.repo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar"/>
   <property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml" />
   <property name="ivy.org" value="org.apache.hadoop"/>
   <property name="build.dir" location="build" />
@@ -184,8 +185,12 @@
   <property name="common.ivy.lib.dir" location="${build.ivy.lib.dir}/${ant.project.name}/common"/>
   <property name="build.ivy.report.dir" location="${build.ivy.dir}/report" />
   <property name="build.ivy.maven.dir" location="${build.ivy.dir}/maven" />
-  <property name="build.ivy.maven.pom" location="${build.ivy.maven.dir}/${name}-${version}.pom" />
-  <property name="build.ivy.maven.jar" location="${build.ivy.maven.dir}/${name}-${version}.jar" />
+  <property name="build.ivy.maven.pom" location="${build.ivy.maven.dir}/hadoop-mapred-${version}.pom" />
+  <property name="build.ivy.maven.jar" location="${build.ivy.maven.dir}/hadoop-mapred-${version}.jar" />
+  <property name="hadoop-mapred.pom" location="${ivy.dir}/hadoop-mapred.xml"/>
+  <property name="hadoop-mapred-test.pom" location="${ivy.dir}/hadoop-mapred-test.xml"/>
+  <property name="hadoop-mapred-examples.pom" location="${ivy.dir}/hadoop-mapred-examples.xml"/>
+  <property name="hadoop-mapred-tools.pom" location="${ivy.dir}/hadoop-mapred-tools.xml"/>
 
   <!--this is the naming policy for artifacts we want pulled down-->
   <property name="ivy.artifact.retrieve.pattern" value="${ant.project.name}/[conf]/[artifact]-[revision].[ext]"/>
@@ -193,6 +198,9 @@
   <!--this is how artifacts that get built are named-->
   <property name="ivy.publish.pattern" value="${name}-[revision].[ext]"/>
   <property name="hadoop-mapred.jar" location="${build.dir}/${final.name}.jar" />
+  <property name="hadoop-mapred-test.jar" location="${build.dir}/${test.final.name}.jar" />
+  <property name="hadoop-mapred-examples.jar" location="${build.dir}/${examples.final.name}.jar" />
+  <property name="hadoop-mapred-tools.jar" location="${build.dir}/${tools.final.name}.jar" />
 
   <!-- jdiff.home property set -->
   <property name="jdiff.home" value="${build.ivy.lib.dir}/${ant.project.name}/jdiff"/>
@@ -214,8 +222,6 @@
   <path id="classpath">
     <pathelement location="${build.classes}"/>
     <pathelement location="${conf.dir}"/>
-    <pathelement path="${lib.dir}/hadoop-common-${hadoop-core.version}.jar"/>
-    <pathelement path="${lib.dir}/hadoop-hdfs-${hadoop-hdfs.version}.jar"/>
     <path refid="ivy-common.classpath"/>
   </path>
 
@@ -227,8 +233,6 @@
     <pathelement location="${build.examples}"/>
     <pathelement location="${build.tools}"/>
     <pathelement path="${clover.jar}"/>
-    <pathelement path="${lib.dir}/hadoop-common-test-${hadoop-core.version}.jar"/>
-    <pathelement path="${lib.dir}/hadoop-hdfs-test-${hadoop-hdfs.version}.jar"/>
     <path refid="classpath"/>
     <pathelement location="${test.mapred.build.classes}" />
     <path refid="ivy-test.classpath"/>
@@ -242,8 +246,6 @@
     <pathelement location="${build.dir}"/>
   </path>
 
-  <!-- properties dependent on the items defined above. -->
-  <!--<available classname="${rat.reporting.classname}" classpathref="classpath" property="rat.present" value="true"/> -->
 
   <!-- ====================================================== -->
   <!-- Macro definitions                                      -->
@@ -286,15 +288,18 @@
     <copy todir="${build.webapps}">
       <fileset dir="${src.webapps}">
         <exclude name="**/*.jsp" />
+        <exclude name="**/*.jspx" />
       </fileset>
     </copy>
 
-    <unzip src="${lib.dir}/hadoop-hdfs-${hadoop-hdfs.version}.jar"
-        dest="${build.dir}">
-      <patternset>
-        <include name="webapps/**"/>
-      </patternset>
-    </unzip>
+     <unzip src="${common.ivy.lib.dir}/hadoop-hdfs-${hadoop-hdfs.version}.jar"
+         dest="${build.dir}">
+       <patternset>
+         <include name="webapps/hdfs/**"/>
+         <include name="webapps/datanode/**"/>
+         <include name="webapps/secondary/**"/>
+       </patternset>
+     </unzip>
 
     <copy todir="${conf.dir}" verbose="true">
       <fileset dir="${conf.dir}" includes="**/*.template"/>
@@ -308,7 +313,19 @@
 
   </target>
 
-  <target name="compile-mapred-classes" depends="init">
+  <target name="avro-generate" depends="init">
+    <mkdir dir="${build.src}/org/apache/hadoop/mapreduce/jobhistory"/>
+    <taskdef name="protocol" classname="org.apache.avro.specific.ProtocolTask">
+      <classpath refid="classpath" />
+    </taskdef>
+    <protocol destdir="${build.src}">
+      <fileset dir="${mapred.src.dir}">
+	<include name="**/*.avpr" />
+      </fileset>
+    </protocol>
+  </target>
+
+  <target name="compile-mapred-classes" depends="init,avro-generate">
     <taskdef classname="org.apache.jasper.JspC" name="jsp-compile" >
        <classpath refid="classpath"/>
     </taskdef>
@@ -372,7 +389,7 @@
 
   <target name="compile-core" depends="clover, compile-mapred-classes, compile-c++" description="Compile core only"/> 
 
-  <target name="compile-contrib" depends="compile-core,compile-c++-libhdfs">
+  <target name="compile-contrib" depends="compile-core,tools">
      <subant target="compile">
         <property name="version" value="${version}"/>
         <property name="hadoop-core.version" value="${hadoop-core.version}"/>
@@ -404,7 +421,7 @@
   </target>
 
   <!-- ================================================================== -->
-  <!-- Make hadoop-mapred.jar                                                     -->
+  <!-- Make hadoop-mapred.jar                                          -->
   <!-- ================================================================== -->
   <!--                                                                    -->
   <!-- ================================================================== -->
@@ -564,6 +581,7 @@
         dir="${basedir}" timeout="${test.timeout}"
         errorProperty="tests.failed" failureProperty="tests.failed">
         <sysproperty key="test.build.data" value="${test.build.data}"/>
+        <sysproperty key="test.tools.input.dir" value = "${test.tools.input.dir}"/>
         <sysproperty key="test.cache.data" value="${test.cache.data}"/>     
         <sysproperty key="test.debug.data" value="${test.debug.data}"/>
         <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
@@ -575,6 +593,9 @@
         <sysproperty key="java.library.path"
           value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
         <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
+        <syspropertyset dynamic="no">
+          <propertyref name="hadoop.tmp.dir"/>
+        </syspropertyset>
         <!-- set compile.c++ in the child jvm only if it is set -->
         <syspropertyset dynamic="no">
           <propertyref name="compile.c++"/>
@@ -611,7 +632,7 @@
     <fail unless="continueOnFailure">Tests failed!</fail>
   </target>
 
-  <target name="test-contrib" depends="compile,compile-mapred-test" description="Run contrib unit tests">
+  <target name="test-contrib" depends="compile,compile-tools,compile-mapred-test" description="Run contrib unit tests">
     <subant target="test">
        <property name="version" value="${version}"/>
        <property name="clover.jar" value="${clover.jar}"/>
@@ -627,7 +648,7 @@
     <fail if="testsfailed">Tests failed!</fail>
   </target>
 
-  <target name="test" depends="test-c++-libhdfs, jar-test, test-core" description="Run all unit tests">
+  <target name="test" depends="jar-test, test-core" description="Run all unit tests">
     <subant target="test-contrib">
       <fileset file="${basedir}/build.xml"/>
      </subant>
@@ -676,6 +697,8 @@
       <property name="version" value="${version}"/>
       <property name="dist.dir" value="${dist.dir}"/>
       <fileset file="${contrib.dir}/streaming/build.xml"/>
+      <fileset file="${contrib.dir}/gridmix/build.xml"/>
+      <fileset file="${contrib.dir}/mumak/build.xml"/>
     </subant>
  </target>
 
@@ -705,10 +728,14 @@
       <sourcePath path="${examples.dir}" />
       <sourcePath path="${tools.src}" />
       <sourcePath path="${basedir}/src/contrib/streaming/src/java" />
+      <sourcePath path="${basedir}/src/contrib/gridmix/src/java" />
+      <sourcePath path="${basedir}/src/contrib/mumak/src/java" />
       <class location="${basedir}/build/${final.name}.jar" />
       <class location="${basedir}/build/${examples.final.name}.jar" />
       <class location="${basedir}/build/${tools.final.name}.jar" />
       <class location="${basedir}/build/contrib/streaming/hadoop-${version}-streaming.jar" />
+      <class location="${basedir}/build/contrib/gridmix/hadoop-${version}-gridmix.jar" />
+      <class location="${basedir}/build/contrib/mumak/hadoop-${version}-mumak.jar" />
     </findbugs>
 
         <xslt style="${findbugs.home}/src/xsl/default.xsl"
@@ -738,22 +765,6 @@
     <style basedir="${mapred.src.dir}" destdir="${build.docs}"
            includes="mapred-default.xml" style="conf/configuration.xsl"/>
     <antcall target="changes-to-html"/>
-    <antcall target="cn-docs"/>
-  </target>
-
-  <target name="cn-docs" depends="forrest.check, init" 
-       description="Generate forrest-based Chinese documentation. To use, specify -Dforrest.home=&lt;base of Apache Forrest installation&gt; on the command line." 
-        if="forrest.home">
-    <exec dir="${src.docs.cn}" executable="${forrest.home}/bin/forrest" failonerror="true">
-      <env key="LANG" value="en_US.utf8"/>
-      <env key="JAVA_HOME" value="${java5.home}"/>
-    </exec>
-    <copy todir="${build.docs.cn}">
-      <fileset dir="${src.docs.cn}/build/site/" />
-    </copy>
-    <style basedir="${mapred.src.dir}" destdir="${build.docs.cn}"
-           includes="mapred-default.xml" style="conf/configuration.xsl"/>
-    <antcall target="changes-to-html"/>
   </target>
 
   <target name="forrest.check" unless="forrest.home" depends="java5.check">
@@ -784,6 +795,8 @@
     	<packageset dir="src/contrib/streaming/src/java"/>
     	<packageset dir="src/contrib/data_join/src/java"/>
     	<packageset dir="src/contrib/index/src/java"/>
+    	<packageset dir="src/contrib/gridmix/src/java"/>
+    	<packageset dir="src/contrib/mumak/src/java"/>
 
         <link href="${javadoc.link.java}"/>
 
@@ -794,6 +807,7 @@
           </fileset>
           <path refid="javadoc-classpath"/>
           <pathelement path="${java.class.path}"/>
+          <pathelement path="${lib.dir}/hadoop-core-test-${hadoop-core.version}.jar"/>
           <pathelement location="${build.tools}"/>
         </classpath>
 
@@ -837,6 +851,8 @@
     	<packageset dir="src/contrib/streaming/src/java"/>
     	<packageset dir="src/contrib/data_join/src/java"/>
     	<packageset dir="src/contrib/index/src/java"/>
+    	<packageset dir="src/contrib/gridmix/src/java"/>
+    	<packageset dir="src/contrib/mumak/src/java"/>
 	
         <link href="${javadoc.link.java}"/>
 
@@ -847,6 +863,7 @@
           </fileset>
           <path refid="javadoc-classpath"/>
           <pathelement path="${java.class.path}"/>
+          <pathelement path="${lib.dir}/hadoop-core-test-${hadoop-core.version}.jar"/>
           <pathelement location="${build.tools}"/>
         </classpath>
 
@@ -1093,6 +1110,123 @@
     </macro_tar>
   </target>
 
+  <target name="ant-task-download" description="To download mvn-ant-task">
+    <get src="${ant_task_repo_url}" dest="${ant_task.jar}" usetimestamp="true"/>
+  </target>
+
+  <target name="mvn-taskdef" depends="ant-task-download">
+     <path id="mvn-ant-task.classpath" path="${ant_task.jar}"/> 
+     <typedef resource="org/apache/maven/artifact/ant/antlib.xml" 
+         uri="urn:maven-artifact-ant"
+         classpathref="mvn-ant-task.classpath"/>
+  </target>   
+
+  <target name="clean-cache" description="Clean. Delete ivy cache">
+    <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-core"/>
+    <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-core-test"/>
+    <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs"/>
+    <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs-test"/>
+  </target>
+
+  <target name="mvn-install-mapred" depends="mvn-taskdef,examples,tools,set-version">
+     <artifact:pom file="${hadoop-mapred.pom}" id="hadoop.mapred"/>
+     <artifact:pom file="${hadoop-mapred-examples.pom}" id="hadoop.mapred.examples"/>
+     <artifact:pom file="${hadoop-mapred-tools.pom}" id="hadoop.mapred.tools"/>
+
+     <artifact:install file="${hadoop-mapred.jar}">
+        <pom refid="hadoop.mapred"/>
+     </artifact:install>
+     <artifact:install file="${hadoop-mapred-examples.jar}">
+        <pom refid="hadoop.mapred.examples"/>
+     </artifact:install>
+     <artifact:install file="${hadoop-mapred-tools.jar}">
+        <pom refid="hadoop.mapred.tools"/>
+     </artifact:install>
+  </target>
+
+  <target name="mvn-install" depends="mvn-taskdef,examples,tools,jar-test,set-version">
+     <artifact:pom file="${hadoop-mapred.pom}" id="hadoop.mapred"/>
+     <artifact:pom file="${hadoop-mapred-test.pom}" id="hadoop.mapred.test"/>
+     <artifact:pom file="${hadoop-mapred-examples.pom}" id="hadoop.mapred.examples"/>
+     <artifact:pom file="${hadoop-mapred-tools.pom}" id="hadoop.mapred.tools"/>
+     <artifact:install file="${hadoop-mapred.jar}">
+        <pom refid="hadoop.mapred"/>
+     </artifact:install>
+     <artifact:install file="${hadoop-mapred-test.jar}">
+        <pom refid="hadoop.mapred.test"/>
+     </artifact:install>
+     <artifact:install file="${hadoop-mapred-examples.jar}">
+        <pom refid="hadoop.mapred.examples"/>
+     </artifact:install>
+     <artifact:install file="${hadoop-mapred-tools.jar}">
+        <pom refid="hadoop.mapred.tools"/>
+     </artifact:install>
+  </target>
+
+  <target name="mvn-deploy" depends="mvn-taskdef, examples, tools, jar-test, set-version">
+     <artifact:pom file="${hadoop-mapred.pom}" id="hadoop.mapred"/>
+     <artifact:pom file="${hadoop-mapred-test.pom}" id="hadoop.mapred.test"/>
+     <artifact:pom file="${hadoop-mapred-examples.pom}" id="hadoop.mapred.examples"/>
+     <artifact:pom file="${hadoop-mapred-tools.pom}" id="hadoop.mapred.tools"/>
+
+     <artifact:install-provider artifactId="wagon-http" version="1.0-beta-2"/>
+     <artifact:deploy file="${hadoop-mapred.jar}">
+         <remoteRepository id="apache.snapshots.https" url="${asfrepo}"/>
+         <pom refid="hadoop.mapred"/>
+     </artifact:deploy>
+     <artifact:deploy file="${hadoop-mapred-test.jar}">
+         <remoteRepository id="apache.snapshots.https" url="${asfrepo}"/>
+         <pom refid="hadoop.mapred.test"/>
+     </artifact:deploy>
+     <artifact:deploy file="${hadoop-mapred-examples.jar}">
+         <remoteRepository id="apache.snapshots.https" url="${asfrepo}"/>
+         <pom refid="hadoop.mapred.examples"/>
+     </artifact:deploy>
+     <artifact:deploy file="${hadoop-mapred-tools.jar}">
+         <remoteRepository id="apache.snapshots.https" url="${asfrepo}"/>
+         <pom refid="hadoop.mapred.tools"/>
+     </artifact:deploy>
+  </target>
+  
+  <target name="set-version">
+    <delete file="${basedir}/ivy/hadoop-mapred.xml"/>
+    <delete file="${basedir}/ivy/hadoop-mapred-test.xml"/>
+    <delete file="${basedir}/ivy/hadoop-mapred-examples.xml"/>
+    <delete file="${basedir}/ivy/hadoop-mapred-tools.xml"/>
+    <copy file="${basedir}/ivy/hadoop-mapred-template.xml" tofile="${basedir}/ivy/hadoop-mapred.xml"/>
+    <copy file="${basedir}/ivy/hadoop-mapred-test-template.xml" tofile="${basedir}/ivy/hadoop-mapred-test.xml"/>
+    <copy file="${basedir}/ivy/hadoop-mapred-examples-template.xml" tofile="${basedir}/ivy/hadoop-mapred-examples.xml"/>
+    <copy file="${basedir}/ivy/hadoop-mapred-tools-template.xml" tofile="${basedir}/ivy/hadoop-mapred-tools.xml"/>
+    <replaceregexp byline="true">
+      <regexp pattern="@version"/>
+      <substitution expression="${version}"/>
+      <fileset dir="${basedir}/ivy">
+        <include name="hadoop-mapred.xml"/>
+      </fileset>
+    </replaceregexp>
+    <replaceregexp byline="true">
+      <regexp pattern="@version"/>
+      <substitution expression="${version}"/>
+      <fileset dir="${basedir}/ivy">
+        <include name="hadoop-mapred-test.xml"/>
+      </fileset>
+    </replaceregexp>
+    <replaceregexp byline="true">
+      <regexp pattern="@version"/>
+      <substitution expression="${version}"/>
+      <fileset dir="${basedir}/ivy">
+        <include name="hadoop-mapred-examples.xml"/>
+      </fileset>
+    </replaceregexp>
+    <replaceregexp byline="true">
+      <regexp pattern="@version"/>
+      <substitution expression="${version}"/>
+      <fileset dir="${basedir}/ivy">
+        <include name="hadoop-mapred-tools.xml"/>
+      </fileset>
+    </replaceregexp>
+  </target>
+
   <!-- ================================================================== -->
   <!-- Perform audit activities for the release                           -->
   <!-- ================================================================== -->
@@ -1119,7 +1253,16 @@
   <target name="clean" depends="clean-contrib" description="Clean.  Delete the build files, and their directories">
     <delete dir="${build.dir}"/>
     <delete dir="${docs.src}/build"/>
-    <delete dir="${src.docs.cn}/build"/>
+    <delete file="${hadoop-mapred.pom}"/>
+    <delete file="${hadoop-mapred-test.pom}"/>
+    <delete file="${hadoop-mapred-examples.pom}"/>
+    <delete file="${hadoop-mapred-tools.pom}"/>
+  </target>
+
+  <target name="veryclean" depends="clean-cache,clean" 
+          description="veryclean.  Delete ivy and ant maven task jar">
+    <delete file="${ant_task.jar}"/>
+    <delete file="${ivy.jar}"/>
   </target>
 
   <!-- ================================================================== -->
@@ -1132,26 +1275,6 @@
      </subant>  	
   </target>
 	
- <target name="test-c++-libhdfs" depends="compile-c++-libhdfs, compile-core" if="islibhdfs">
-    <delete dir="${test.libhdfs.dir}"/>
-    <mkdir dir="${test.libhdfs.dir}"/>
-    <mkdir dir="${test.libhdfs.dir}/logs"/>
-    <mkdir dir="${test.libhdfs.dir}/hdfs/name"/>
-
-    <exec dir="${build.c++.libhdfs}" executable="${make.cmd}" failonerror="true">
-        <env key="OS_NAME" value="${os.name}"/>
-        <env key="OS_ARCH" value="${os.arch}"/>
-        <env key="JVM_ARCH" value="${jvm.arch}"/>
-        <env key="LIBHDFS_BUILD_DIR" value="${build.c++.libhdfs}"/>
-        <env key="HADOOP_HOME" value="${basedir}"/>
-        <env key="HADOOP_CONF_DIR" value="${test.libhdfs.conf.dir}"/>
-        <env key="HADOOP_LOG_DIR" value="${test.libhdfs.dir}/logs"/>
-        <env key="LIBHDFS_SRC_DIR" value="${c++.libhdfs.src}"/>
-        <env key="LIBHDFS_INSTALL_DIR" value="${install.c++}/lib"/>  
-        <env key="LIB_DIR" value="${common.ivy.lib.dir}"/>
-		<arg value="test"/>
-    </exec>
-  </target>
 
 <!-- ================================================================== -->
 <!-- librecordio targets.                                               -->
@@ -1204,16 +1327,8 @@
           searchpath="yes" failonerror="yes">
        <arg value="-if"/>
     </exec>
-    <antcall target="create-c++-configure-libhdfs"/>
-  </target>
+   </target>
    
-  <target name="create-c++-configure-libhdfs" depends="check-c++-libhdfs" if="islibhdfs">
-    <exec executable="autoreconf" dir="${c++.libhdfs.src}" 
-          searchpath="yes" failonerror="yes">
-       <arg value="-if"/>
-    </exec>
-  </target>
-
   <target name="check-c++-makefiles" depends="init" if="compile.c++">
     <condition property="need.c++.utils.makefile">
        <not> <available file="${build.c++.utils}/Makefile"/> </not>
@@ -1226,33 +1341,6 @@
     </condition>
   </target>
 
-  <target name="check-c++-libhdfs">
-    <condition property="islibhdfs">
-      <and>
-        <isset property="compile.c++"/>
-        <isset property="libhdfs"/>
-      </and>
-    </condition>
-  </target>
-
-  <target name="check-c++-makefile-libhdfs" depends="init,check-c++-libhdfs" if="islibhdfs">
-    <condition property="need.c++.libhdfs.makefile">
-       <not> <available file="${build.c++.libhdfs}/Makefile"/> </not>
-    </condition>
-  </target>
-
-  <target name="create-c++-libhdfs-makefile" depends="check-c++-makefile-libhdfs" 
-                                           if="need.c++.libhdfs.makefile">
-    <mkdir dir="${build.c++.libhdfs}"/>
-    <chmod file="${c++.libhdfs.src}/configure" perm="ugo+x"/>
-    <exec executable="${c++.libhdfs.src}/configure" dir="${build.c++.libhdfs}"
-          failonerror="yes">
-      <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
-      <env key="JVM_ARCH" value="${jvm.arch}"/>
-      <arg value="--prefix=${install.c++}"/>
-    </exec>
-  </target>
-
   <target name="create-c++-utils-makefile" depends="check-c++-makefiles" 
                                            if="need.c++.utils.makefile">
     <mkdir dir="${build.c++.utils}"/>
@@ -1319,15 +1407,6 @@
   <target name="compile-c++-examples" 
           depends="compile-c++-examples-pipes"/>
 
-  <target name="compile-c++-libhdfs" depends="create-c++-libhdfs-makefile" if="islibhdfs">
-    <exec executable="${make.cmd}" dir="${build.c++.libhdfs}" searchpath="yes"
-          failonerror="yes">
-      <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
-      <env key="JVM_ARCH" value="${jvm.arch}"/>
-      <arg value="install"/>
-    </exec>
-  </target>
-
  <target name="clover" depends="clover.setup, clover.info" description="Instrument the Unit tests using Clover.  To use, specify -Dclover.home=&lt;base of clover installation&gt; -Drun.clover=true on the command line."/>
 
 <target name="clover.setup" if="clover.enabled">
@@ -1473,6 +1552,9 @@
   </target>
 
 
+  <property name="ivyresolvelog" value="download-only"/>
+  <property name="ivyretrievelog" value="quiet"/>
+
   <target name="ivy-init" depends="ivy-init-antlib" >
 
     <!--Configure Ivy by reading in the settings file
@@ -1482,78 +1564,92 @@
   </target>
 
   <target name="ivy-resolve" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings"/>
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings"
+      log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-resolve-javadoc" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="javadoc"/>
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="javadoc"
+      log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-resolve-releaseaudit" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="releaseaudit"/>
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="releaseaudit"
+      log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-resolve-test" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="test" />
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="test"
+      log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-resolve-common" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common" />
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="common"
+      log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-resolve-jdiff" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="jdiff" />
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="jdiff" 
+      log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-resolve-checkstyle" depends="ivy-init">
-    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="checkstyle"/>
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="checkstyle"
+      log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-retrieve" depends="ivy-resolve"
     description="Retrieve Ivy-managed artifacts">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+        log="${ivyresolvelog}"/>
   </target>
 
   <target name="ivy-retrieve-checkstyle" depends="ivy-resolve-checkstyle"
     description="Retrieve Ivy-managed artifacts for the checkstyle configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+        log="${ivyresolvelog}"/>
     <ivy:cachepath pathid="checkstyle-classpath" conf="checkstyle"/>
   </target>
 
   <target name="ivy-retrieve-jdiff" depends="ivy-resolve-jdiff"
     description="Retrieve Ivy-managed artifacts for the javadoc configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+        log="${ivyresolvelog}"/>
     <ivy:cachepath pathid="jdiff-classpath" conf="jdiff"/>
   </target>
 
   <target name="ivy-retrieve-javadoc" depends="ivy-resolve-javadoc"
     description="Retrieve Ivy-managed artifacts for the javadoc configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+        log="${ivyresolvelog}"/>
     <ivy:cachepath pathid="javadoc-classpath" conf="javadoc"/>
   </target>
 
   <target name="ivy-retrieve-test" depends="ivy-resolve-test"
     description="Retrieve Ivy-managed artifacts for the test configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+        log="${ivyresolvelog}"/>
     <ivy:cachepath pathid="ivy-test.classpath" conf="test"/>
   </target>
 
   <target name="ivy-retrieve-common" depends="ivy-resolve-common"
     description="Retrieve Ivy-managed artifacts for the compile configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+        log="${ivyresolvelog}"/>
     <ivy:cachepath pathid="ivy-common.classpath" conf="common"/>
   </target>
 
   <target name="ivy-retrieve-releaseaudit" depends="ivy-resolve-releaseaudit"
     description="Retrieve Ivy-managed artifacts for the compile configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
-      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" />
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+        log="${ivyresolvelog}"/>
     <ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/>
   </target>
 
@@ -1565,72 +1661,6 @@
     </echo>
   </target>
 
-  <target name="assert-hadoop-jar-exists" depends="ivy-init">
-    <fail>
-      <condition >
-        <not>
-          <available file="${hadoop-mapred.jar}" />
-        </not>
-      </condition>
-      Not found: ${hadoop-mapred.jar}
-      Please run the target "jar" in the main build file
-    </fail>
-
-  </target>
-
-  <target name="ready-to-publish" depends="jar,assert-hadoop-jar-exists,ivy-resolve"/>
-
-  <target name="ivy-publish-local" depends="ready-to-publish,ivy-resolve">
-    <ivy:publish
-      settingsRef="${ant.project.name}.ivy.settings"
-      resolver="local"
-      pubrevision="${version}"
-      overwrite="true"
-      artifactspattern="${build.dir}/${ivy.publish.pattern}" />
-  </target>
-
-
-  <!-- this is here for curiosity, to see how well the makepom task works
-  Answer: it depends whether you want transitive dependencies excluded or not
-  -->
-  <target name="makepom" depends="ivy-resolve">
-    <ivy:makepom settingsRef="${ant.project.name}.ivy.settings"
-      ivyfile="ivy.xml"
-      pomfile="${build.ivy.maven.dir}/generated.pom">
-      <ivy:mapping conf="default" scope="default"/>
-      <ivy:mapping conf="master" scope="master"/>
-      <ivy:mapping conf="runtime" scope="runtime"/>
-    </ivy:makepom>
-  </target>
-
-
-  <target name="copy-jar-to-maven" depends="ready-to-publish">
-    <copy file="${hadoop-mapred.jar}"
-      tofile="${build.ivy.maven.jar}"/>
-    <checksum file="${build.ivy.maven.jar}" algorithm="md5"/>
-  </target>
-
-  <target name="copypom" depends="ivy-init-dirs">
-
-   <presetdef name="expandingcopy" >
-    <copy overwrite="true">
-      <filterchain>
-        <expandproperties/>
-      </filterchain>
-    </copy>
-   </presetdef>
-
-   <expandingcopy file="ivy/hadoop-mapred.pom"
-      tofile="${build.ivy.maven.pom}"/>
-   <checksum file="${build.ivy.maven.pom}" algorithm="md5"/>
-  </target>
-
-  <target name="maven-artifacts" depends="copy-jar-to-maven,copypom" />
-
-  <target name="published" depends="ivy-publish-local,maven-artifacts">
-
-  </target>
-
   <!-- taskcontroller targets -->
   <target name="init-task-controller-build">
     <mkdir dir="${build.c++.task-controller}" />

Propchange: hadoop/mapreduce/branches/MAPREDUCE-233/conf/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Nov 28 20:26:01 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/conf:713112
 /hadoop/core/trunk/conf:784664-785643
-/hadoop/mapreduce/trunk/conf:804974-807678
+/hadoop/mapreduce/trunk/conf:804974-884916

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/conf/capacity-scheduler.xml.template
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/conf/capacity-scheduler.xml.template?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/conf/capacity-scheduler.xml.template (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/conf/capacity-scheduler.xml.template Sat Nov 28 20:26:01 2009
@@ -14,6 +14,29 @@
       to be available for jobs in this queue.
     </description>    
   </property>
+
+  <property>
+    <name>mapred.capacity-scheduler.queue.default.subQueues</name>
+    <value></value>
+    <description>Sub-queues are queues configured within queues. 
+       They provide a mechanism for administrators to link logically related queues.
+       Sub-queues can be nested. So there can be queues within a sub-queue.
+    </description>    
+  </property>
+
+  <property>
+    <name>mapred.capacity-scheduler.queue.default.maximum-capacity</name>
+    <value>-1</value>
+    <description>
+	maximum-capacity defines a limit beyond which a sub-queue cannot use the capacity of its parent queue.
+	This provides a means to limit how much excess capacity a sub-queue can use. By default, there is no limit.
+	The maximum-capacity of a queue can only be greater than or equal to its minimum capacity.
+        The default value of -1 implies that a sub-queue can use the complete capacity of its parent.
+        This property could be used to curtail certain jobs which are long running in nature from occupying more than a 
+        certain percentage of the cluster, which in the absence of pre-emption, could lead to capacity guarantees of 
+        other queues being affected.
+    </description>    
+  </property>
   
   <property>
     <name>mapred.capacity-scheduler.queue.default.supports-priority</name>
@@ -46,44 +69,6 @@
     </description>
   </property>
 
-<property>
-  <name>mapred.capacity-scheduler.queue.default.max.map.slots</name>
-  <value>-1</value>
-  <description>
-    This value is the maximum map slots that can be used in a
-    queue at any point of time. So for example assuming above config value
-    is 100 , not more than 100 tasks would be in the queue at any point of
-    time, assuming each task takes one slot.
-
-    Default value of -1 would disable this capping feature
-
-    Typically the queue capacity should be equal to this limit.
-    If queue capacity is more than this limit, excess capacity will be
-    used by the other queues. If queue capacity is less than the above
-    limit , then the limit would be the queue capacity - as in the current
-    implementation
-  </description>
-</property>
-
-<property>
-  <name>mapred.capacity-scheduler.queue.default.max.reduce.slots</name>
-  <value>-1</value>
-  <description>
-    This value is the maximum reduce slots that can be used in a
-    queue at any point of time. So for example assuming above config value
-      is 100 , not more than 100 reduce tasks would be in the queue at any point
-      of time, assuming each task takes one slot.
-
-    Default value of -1 would disable this capping feature
-
-    Typically the queue capacity should be equal to this limit.
-    If queue capacity is more than this limit, excess capacity will be
-    used by the other queues. If queue capacity is less than the above
-    limit , then the limit would be the queue capacity - as in the current
-    implementation
-  </description>
-</property>
-  
   <!-- The default configuration settings for the capacity task scheduler -->
   <!-- The default values would be applied to all the queues which don't have -->
   <!-- the appropriate property for the particular queue -->

Propchange: hadoop/mapreduce/branches/MAPREDUCE-233/conf/capacity-scheduler.xml.template
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sat Nov 28 20:26:01 2009
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/conf/capacity-scheduler.xml.template:713112
 /hadoop/core/trunk/conf/capacity-scheduler.xml.template:776175-785643
-/hadoop/mapreduce/trunk/conf/capacity-scheduler.xml.template:804974-807678
+/hadoop/mapreduce/trunk/conf/capacity-scheduler.xml.template:804974-884916

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/conf/mapred-queues.xml.template
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/conf/mapred-queues.xml.template?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/conf/mapred-queues.xml.template (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/conf/mapred-queues.xml.template Sat Nov 28 20:26:01 2009
@@ -1,68 +1,56 @@
 <?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!-- This is a template file for queue acls configuration properties -->
-
-<configuration>
-
-<property>
-  <name>mapred.queue.names</name>
-  <value>default</value>
-  <description> Comma separated list of queues configured for this jobtracker.
-    Jobs are added to queues and schedulers can configure different 
-    scheduling properties for the various queues. To configure a property 
-    for a queue, the name of the queue must match the name specified in this 
-    value. Queue properties that are common to all schedulers are configured 
-    here with the naming convention, mapred.queue.$QUEUE-NAME.$PROPERTY-NAME,
-    for e.g. mapred.queue.default.submit-job-acl.
-    The number of queues configured in this parameter could depend on the
-    type of scheduler being used, as specified in 
-    mapred.jobtracker.taskScheduler. For example, the JobQueueTaskScheduler
-    supports only a single queue, which is the default configured here.
-    Before adding more queues, ensure that the scheduler you've configured
-    supports multiple queues.
-  </description>
-</property>
-
-<property>
-  <name>mapred.acls.enabled</name>
-  <value>false</value>
-  <description> Specifies whether ACLs are enabled, and should be checked
-    for various operations.
-  </description>
-</property>
-
-<property>
-  <name>mapred.queue.default.acl-submit-job</name>
-  <value>*</value>
-  <description> Comma separated list of user and group names that are allowed
-    to submit jobs to the 'default' queue. The user list and the group list
-    are separated by a blank. For e.g. alice,bob group1,group2. 
-    If set to the special value '*', it means all users are allowed to 
-    submit jobs. 
-  </description>
-</property>
-
-<property>
-  <name>mapred.queue.default.acl-administer-jobs</name>
-  <value>*</value>
-  <description> Comma separated list of user and group names that are allowed
-    to delete jobs or modify job's priority for jobs not owned by the current
-    user in the 'default' queue. The user list and the group list
-    are separated by a blank. For e.g. alice,bob group1,group2. 
-    If set to the special value '*', it means all users are allowed to do 
-    this operation.
-  </description>
-</property>
-
-<property>
-  <name>mapred.queue.default.state</name>
-  <value>running</value>
-  <description>
-   This values defines the state , default queue is in.
-   the values can be either "stopped" or "running"
-   This value can be changed at runtime.
-  </description>
-</property>
-
-</configuration>
+<!-- This is the template for queue configuration. The format supports nesting of
+     queues within queues - a feature called hierarchical queues. All queues are
+     defined within the 'queues' tag which is the top level element for this
+     XML document.
+     The 'aclsEnabled' attribute should be set to true, if ACLs should be checked
+     on queue operations such as submitting jobs, killing jobs etc. -->
+<queues aclsEnabled="false">
+
+  <!-- Configuration for a queue is specified by defining a 'queue' element. -->
+  <queue>
+
+    <!-- Name of a queue. Queue name cannot contain a ':'  -->
+    <name>default</name>
+
+    <!-- properties for a queue, typically used by schedulers,
+    can be defined here -->
+    <properties>
+    </properties>
+
+	<!-- State of the queue. If running, the queue will accept new jobs.
+         If stopped, the queue will not accept new jobs. -->
+    <state>running</state>
+
+    <!-- Specifies the ACLs to check for submitting jobs to this queue.
+         If set to '*', it allows all users to submit jobs to the queue.
+         For specifying a list of users and groups the format to use is
+         user1,user2 group1,group2 -->
+    <acl-submit-job>*</acl-submit-job>
+
+    <!-- Specifies the ACLs to check for modifying jobs in this queue.
+         Modifications include killing jobs, tasks of jobs or changing
+         priorities.
+         If set to '*', it allows all users to administer jobs in the queue.
+         For specifying a list of users and groups the format to use is
+         user1,user2 group1,group2 -->
+    <acl-administer-jobs>*</acl-administer-jobs>
+  </queue>
+
+  <!-- Here is a sample of a hierarchical queue configuration
+       where q2 is a child of q1. In this example, q2 is a leaf level
+       queue as it has no queues configured within it. Currently, ACLs
+       and state are only supported for the leaf level queues.
+       Note also the usage of properties for the queue q2.
+  <queue>
+    <name>q1</name>
+    <queue>
+      <name>q2</name>
+      <properties>
+        <property key="capacity" value="20"/>
+        <property key="user-limit" value="30"/>
+      </properties>
+    </queue>
+  </queue>
+ -->
+</queues>

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/conf/taskcontroller.cfg
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/conf/taskcontroller.cfg?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/conf/taskcontroller.cfg (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/conf/taskcontroller.cfg Sat Nov 28 20:26:01 2009
@@ -1,2 +1,2 @@
-mapred.local.dir=#configured value of mapred.local.dir. It can be a list of comma separated paths.
+mapreduce.cluster.local.dir=#configured value of mapreduce.cluster.local.dir. It can be a list of comma separated paths.
 hadoop.log.dir=#configured value of hadoop.log.dir.
\ No newline at end of file

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/ivy.xml?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/ivy.xml (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/ivy.xml Sat Nov 28 20:26:01 2009
@@ -27,50 +27,24 @@
     <!--these match the Maven configurations-->
     <conf name="default" extends="master,runtime"/>
     <conf name="master" description="contains the artifact but no dependencies"/>
-    <conf name="runtime" description="runtime but not the artifact"
-      extends="client,server,s3-server,kfs"/>
-
-    <conf name="mandatory" description="contains the critical  dependencies"
-      extends="commons-logging,log4j"/>
+    <conf name="compile" description="contains the artifact but no dependencies"/>
+    <conf name="runtime" description="runtime but not the artifact"/>
 
     <!--
     These public configurations contain the core dependencies for running hadoop client or server.
     The server is effectively a superset of the client.
     -->
-    <conf name="client" description="client-side dependencies"
-      extends="mandatory,httpclient"/>
-    <conf name="server" description="server-side dependencies"
-      extends="client"/>
-    <conf name="s3-client" description="dependencies for working with S3/EC2 infrastructure"
-      extends="client"/>
-    <conf name="s3-server" description="dependencies for running on S3/EC2 infrastructure"
-      extends="s3-client,server"/>
-    <conf name="kfs" description="dependencies for KFS file system support"/>
-    <conf name="ftp" description="dependencies for workign with FTP filesytems"
-              extends="mandatory"/>
-   <conf name="jetty" description="Jetty provides the in-VM HTTP daemon" extends="commons-logging"/>
-
     <!--Private configurations. -->
 
-    <conf name="common" visibility="private" extends="runtime,mandatory,httpclient,ftp,jetty"
-		      description="common artifacts"/>
-    <conf name="javadoc" visibility="private" description="artiracts required while performing doc generation"
-      extends="common,mandatory,jetty,lucene"/>
-    <!--Testing pulls in everything-->
-    <conf name="test" extends="common,s3-server,kfs" visibility="private"
-      description="the classpath needed to run tests"/>
+    <conf name="common" visibility="private" extends="compile,runtime" description="common artifacts"/>
+    <conf name="javadoc" visibility="private" description="artifacts required while performing doc generation" extends="common"/>
+    <conf name="test" extends="common" visibility="private" description="the classpath needed to run tests"/>
 
-    <conf name="test-hdfswithmr" extends="test" visibility="private"
-      description="the classpath needed to run tests"/>
+    <conf name="test-hdfswithmr" extends="test" visibility="private" description="the classpath needed to run tests"/>
 
-    <conf name="releaseaudit" visibility="private"
-	description="Artifacts required for releaseaudit target"/>
+    <conf name="releaseaudit" visibility="private" description="Artifacts required for releaseaudit target"/>
      
-    <conf name="commons-logging" visibility="private"/>
-    <conf name="httpclient" visibility="private" extends="commons-logging"/>
-    <conf name="log4j" visibility="private"/>
-    <conf name="lucene" visibility="private"/>
-    <conf name="jdiff" visibility="private" extends="log4j,s3-client,jetty,server"/>
+    <conf name="jdiff" visibility="private" extends="common"/>
     <conf name="checkstyle" visibility="private"/>
 
   </configurations>
@@ -79,208 +53,47 @@
     <!--get the artifact from our module name-->
     <artifact conf="master"/>
   </publications>
-  <dependencies>
-
- <!--used client side-->
-    <dependency org="commons-cli"
-      name="commons-cli"
-      rev="${commons-cli.version}"
-      conf="client->default"/>
-    <dependency org="checkstyle"
-      name="checkstyle"
-      rev="${checkstyle.version}"
-      conf="checkstyle->default"/>
-    <dependency org="jdiff"
-      name="jdiff"
-      rev="${jdiff.version}"
-      conf="jdiff->default"/>
-    <dependency org="xerces"
-      name="xerces"
-      rev="${xerces.version}"
-      conf="jdiff->default">
-    </dependency>
-
-    <dependency org="xmlenc"
-      name="xmlenc"
-      rev="${xmlenc.version}"
-      conf="server->default"/>
-
-    <!--Configuration: httpclient-->
-
-    <!--
-    commons-httpclient asks for too many files.
-    All it needs is commons-codec and commons-logging JARs
-    -->
-    <dependency org="commons-httpclient"
-      name="commons-httpclient"
-      rev="${commons-httpclient.version}"
-      conf="httpclient->master">
-    </dependency>
-
-    <dependency org="commons-codec"
-      name="commons-codec"
-      rev="${commons-codec.version}"
-      conf="httpclient->default"/>
-
-    <dependency org="commons-net"
-      name="commons-net"
-      rev="${commons-net.version}"
-      conf="ftp->default"/>
-
-    <!--Configuration: Jetty -->
-
-<!-- <dependency org="javax.servlet"
-      name="servlet-api"
-      rev="${servlet-api.version}"
-      conf="jetty->master"/>   -->
-    <dependency org="org.mortbay.jetty"
-      name="jetty"
-      rev="${jetty.version}"
-      conf="jetty->master"/>
-    <dependency org="org.mortbay.jetty"
-      name="jetty-util"
-      rev="${jetty-util.version}"
-      conf="jetty->master"/>
-
-    <dependency org="tomcat"
-      name="jasper-runtime"
-      rev="${jasper.version}"
-      conf="jetty->master"/>
-    <dependency org="tomcat"
-      name="jasper-compiler"
-      rev="${jasper.version}"
-      conf="jetty->master"/>
-    <dependency org="org.mortbay.jetty"
-      name="jsp-api-2.1"
-      rev="${jetty.version}"
-      conf="jetty->master"/>
-    <dependency org="org.mortbay.jetty"
-      name="jsp-2.1"
-      rev="${jetty.version}"
-      conf="jetty->master"/>
-    <dependency org="commons-el"
-      name="commons-el"
-      rev="${commons-el.version}"
-      conf="jetty->master"/>
-
-
-    <!--Configuration: commons-logging -->
-
-    <!--it is essential that only the master JAR of commons logging
-    is pulled in, as its dependencies are usually a mess, including things
-    like out of date servlet APIs, bits of Avalon, etc.
-    -->
-    <dependency org="commons-logging"
-      name="commons-logging"
-      rev="${commons-logging.version}"
-      conf="commons-logging->master"/>
-
-
-    <!--Configuration: commons-logging -->
-
-    <!--log4J is not optional until commons-logging.properties is stripped out of the JAR -->
-    <dependency org="log4j"
-      name="log4j"
-      rev="${log4j.version}"
-      conf="log4j->master"/>
-
-    <!--Configuration: s3-client -->
-    <!--there are two jets3t projects in the repository; this one goes up to 0.6 and
-    is assumed to be the live one-->
-    <dependency org="net.java.dev.jets3t"
-      name="jets3t"
-      rev="${jets3t.version}"
-      conf="s3-client->master"/>
-    <dependency org="commons-net"
-      name="commons-net"
-      rev="${commons-net.version}"
-      conf="s3-client->master"/> 
-    <dependency org="org.mortbay.jetty"
-      name="servlet-api-2.5"
-      rev="${servlet-api-2.5.version}"
-      conf="s3-client->master"/>
-    <dependency org="net.sf.kosmosfs"
-      name="kfs"
-      rev="${kfs.version}"
-      conf="kfs->default"/>
-
-    <!--Configuration: test -->
-    <!--artifacts needed for testing -->
-
-    <dependency org="org.apache.ftpserver"
-      name="ftplet-api"
-      rev="${ftplet-api.version}"
-      conf="test->default"/>
-    <dependency org="org.apache.mina"
-      name="mina-core"
-      rev="${mina-core.version}"
-      conf="test->default"/>
-    <dependency org="org.apache.ftpserver"
-      name="ftpserver-core"
-      rev="${ftpserver-core.version}"
-      conf="test->default"/>
-    <dependency org="org.apache.ftpserver"
-      name="ftpserver-deprecated"
-      rev="${ftpserver-deprecated.version}"
-      conf="test->default"/>
-
-    <dependency org="junit"
-      name="junit"
-      rev="${junit.version}"
-      conf="common->default"/>
-    <dependency org="org.apache.rat"
-      name="apache-rat-tasks"
-      rev="${rats-lib.version}"
-      conf="releaseaudit->default"/>
-    <dependency org="commons-lang"
-      name="commons-lang"
-      rev="${commons-lang.version}"
-      conf="releaseaudit->default"/>
-    <dependency org="commons-collections"
-      name="commons-collections"
-      rev="${commons-collections.version}"
-      conf="releaseaudit->default"/>
-    <dependency org="hsqldb"
-      name="hsqldb"
-      rev="${hsqldb.version}"
-      conf="common->default"/>
-    <dependency org="org.apache.lucene"
-      name="lucene-core"
-      rev="${lucene-core.version}"
-      conf="javadoc->default"/> 
-    <dependency org="commons-logging"
-      name="commons-logging-api"
-      rev="${commons-logging-api.version}"
-      conf="common->default"/>
-    <dependency org="org.slf4j"
-      name="slf4j-api"
-      rev="${slf4j-api.version}"
-      conf="common->master"/>
-    <dependency org="org.eclipse.jdt"
-      name="core"
-      rev="${core.version}"
-      conf="common->master"/>
-    <dependency org="oro"
-      name="oro"
-      rev="${oro.version}"
-      conf="common->default"/>
-    <dependency org="org.slf4j"
-      name="slf4j-log4j12"
-      rev="${slf4j-log4j12.version}"
-      conf="common->master">
-    </dependency>
-    <dependency org="org.apache.hadoop"
-      name="avro"
-      rev="1.0.0"
-      conf="common->default"/>
-    <dependency org="org.codehaus.jackson"
-      name="jackson-mapper-asl"
-      rev="1.0.1"
-      conf="common->default"/>
-    <dependency org="com.thoughtworks.paranamer"
-      name="paranamer"
-      rev="1.5"
-      conf="common->default"/>
-    </dependencies>
+ <dependencies>
+   <dependency org="org.apache.hadoop" name="hadoop-core" 
+               rev="${hadoop-core.version}" conf="common->default"/> 
+   <dependency org="org.apache.hadoop" name="hadoop-core-test" 
+               rev="${hadoop-core.version}" conf="common->default"/> 
+   <dependency org="org.apache.hadoop" name="hadoop-hdfs" 
+               rev="${hadoop-hdfs.version}" conf="common->default"/> 
+   <dependency org="commons-logging" name="commons-logging" 
+               rev="${commons-logging.version}" conf="common->master"/>
+   <dependency org="log4j" name="log4j" rev="${log4j.version}" 
+               conf="common->master"/>
+
+   <dependency org="org.slf4j" name="slf4j-api" rev="${slf4j-api.version}" 
+               conf="test->master"/>
+   <dependency org="org.slf4j" name="slf4j-log4j12" 
+               rev="${slf4j-log4j12.version}" conf="test->master"/>
+   <dependency org="org.apache.hadoop" name="hadoop-core-test" 
+               rev="${hadoop-core.version}" conf="test->default"/>
+   <dependency org="org.apache.hadoop" name="hadoop-hdfs-test" 
+               rev="${hadoop-hdfs.version}" conf="test->default"/>
+
+   <dependency org="checkstyle" name="checkstyle" rev="${checkstyle.version}"
+               conf="checkstyle->default"/>
+
+   <dependency org="jdiff" name="jdiff" rev="${jdiff.version}"
+               conf="jdiff->default"/>
+   <dependency org="xerces" name="xerces" rev="${xerces.version}"
+               conf="jdiff->default"/>
+
+   <dependency org="org.apache.rat" name="apache-rat-tasks" 
+               rev="${rats-lib.version}" conf="releaseaudit->default"/>
+   <dependency org="commons-lang" name="commons-lang" 
+               rev="${commons-lang.version}" conf="releaseaudit->default"/>
+   <dependency org="commons-collections" name="commons-collections" 
+               rev="${commons-collections.version}" 
+               conf="releaseaudit->default"/>
+
+   <dependency org="org.apache.lucene" name="lucene-core" 
+               rev="${lucene-core.version}" conf="javadoc->default"/>
+   <dependency org="org.apache.hadoop" name="avro" rev="${avro.version}" 
+               conf="common->default"/>
+ </dependencies>
   
 </ivy-module>

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/ivy/ivysettings.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/ivy/ivysettings.xml?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/ivy/ivysettings.xml (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/ivy/ivysettings.xml Sat Nov 28 20:26:01 2009
@@ -29,53 +29,40 @@
           http://ibiblio.lsu.edu/main/pub/packages/maven2
           http://www.ibiblio.net/pub/packages/maven2
   -->
-  <property name="repo.maven.org"
-    value="http://repo1.maven.org/maven2/"
-    override="false"/>
-  <property name="snapshot.apache.org"
-    value="http://people.apache.org/repo/m2-snapshot-repository/"
-    override="false"/>
-  <property name="maven2.pattern"
-    value="[organisation]/[module]/[revision]/[module]-[revision]"/>
-  <property name="maven2.pattern.ext"
-    value="${maven2.pattern}.[ext]"/>
-  <!-- pull in the local repository -->
-  <include url="${ivy.default.conf.dir}/ivyconf-local.xml"/>
-  <settings defaultResolver="default"/>
+  <property name="repo.maven.org" value="http://repo1.maven.org/maven2/" override="false"/>
+  <property name="snapshot.apache.org" value="https://repository.apache.org/content/repositories/snapshots/" override="false"/>
+  <property name="maven2.pattern" value="[organisation]/[module]/[revision]/[module]-[revision]"/>
+  <property name="repo.dir" value="${user.home}/.m2/repository"/>
+  <property name="maven2.pattern.ext"  value="${maven2.pattern}.[ext]"/>
+  <property name="resolvers" value="default" override="false"/>
+  <settings defaultResolver="${resolvers}"/>
+
   <resolvers>
-    <ibiblio name="maven2"
-      root="${repo.maven.org}"
-      pattern="${maven2.pattern.ext}"
-      m2compatible="true"
-      />
-    <ibiblio name="apache-snapshot"
-      root="${snapshot.apache.org}"
-      pattern="${maven2.pattern.ext}"
-      m2compatible="true"
-      />
+    <ibiblio name="maven2" root="${repo.maven.org}" pattern="${maven2.pattern.ext}" m2compatible="true"/>
+    <ibiblio name="apache-snapshot" root="${snapshot.apache.org}" m2compatible="true"/>
+
+    <filesystem name="fs" m2compatible="true" force="true">
+       <artifact pattern="${repo.dir}/org/apache/hadoop/[module]/[revision]/[module]-[revision].[ext]"/>
+       <ivy pattern="${repo.dir}/org/apache/hadoop/[module]/[revision]/[module]-[revision].pom"/>
+    </filesystem>
+
     <chain name="default" dual="true">
-      <resolver ref="local"/>
+      <resolver ref="apache-snapshot"/> 
       <resolver ref="maven2"/>
     </chain>
-    <chain name="internal">
-      <resolver ref="local"/>
-    </chain>
-    <chain name="external">
+
+    <chain name="internal" dual="true">
+      <resolver ref="fs"/>
+      <resolver ref="apache-snapshot"/> 
       <resolver ref="maven2"/>
     </chain>
-    <chain name="external-and-snapshots">
+
+    <chain name="external">
       <resolver ref="maven2"/>
-      <resolver ref="apache-snapshot"/>
     </chain>
+
   </resolvers>
   <modules>
-    <!--
-    This forces a requirement for other hadoop-artifacts to be built locally
-    rather than look for them online.
-
-    -->
-    <module organisation="org.apache.hadoop" name="hadoop*" resolver="internal"/>
-    <!--until commons cli is external, we need to pull it in from the snapshot repository -if present -->
-    <module organisation="org.apache.commons" name=".*" resolver="external-and-snapshots"/>
+     <module organisation="org.apache.hadoop" name="hadoop-*" resolver="${resolvers}"/>
   </modules>
 </ivysettings>

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/ivy/libraries.properties?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/ivy/libraries.properties (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/ivy/libraries.properties Sat Nov 28 20:26:01 2009
@@ -14,8 +14,10 @@
 #It drives ivy and the generation of a maven POM
 
 #These are the versions of our dependencies (in alphabetical order)
-apacheant.version=1.7.0
-avro.version=1.0.0
+apacheant.version=1.7.1
+ant-task.version=2.0.10
+avro.version=1.2.0
+
 checkstyle.version=4.2
 
 commons-cli.version=1.2
@@ -36,11 +38,16 @@
 ftpserver-core.version=1.0.0
 ftpserver-deprecated.version=1.0.0-M2
 
+hadoop-core.version=0.22.0-SNAPSHOT
+hadoop-hdfs.version=0.22.0-SNAPSHOT
+
 hsqldb.version=1.8.0.10
 
 #ivy.version=2.0.0-beta2
 ivy.version=2.0.0-rc2
 jackson-mapper-asl.version=1.0.1paranamer.version=1.5
+jackson.version=1.0.1
+
 jasper.version=5.5.12
 jsp.version=2.1
 jsp-api.version=5.5.12
@@ -49,7 +56,6 @@
 jetty-util.version=6.1.14
 junit.version=4.5
 jdiff.version=1.0.9
-json.version=1.0
 
 kfs.version=0.3
 
@@ -60,6 +66,8 @@
 
 oro.version=2.0.8
 paranamer.version=1.5
+paranamer.version=1.5
+
 rats-lib.version=0.6
 
 servlet.version=4.0.6



Mime
View raw message