hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From sha...@apache.org
Subject svn commit: r816664 [1/9] - in /hadoop/mapreduce/trunk: ./ conf/ src/benchmarks/gridmix/ src/benchmarks/gridmix/pipesort/ src/benchmarks/gridmix2/ src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/ src/c++/pipes/impl/ src/c++/task-controller...
Date Fri, 18 Sep 2009 15:10:02 GMT
Author: sharad
Date: Fri Sep 18 15:09:48 2009
New Revision: 816664

URL: http://svn.apache.org/viewvc?rev=816664&view=rev
Log:
MAPREDUCE-849. Rename configuration properties. Contributed by Amareshwari Sriramadasu.

Added:
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/MRConfig.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/server/jobtracker/JTConfig.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/server/tasktracker/TTConfig.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/util/ConfigUtil.java
Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/conf/taskcontroller.cfg
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/generateData.sh
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.large
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.medium
    hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.small
    hadoop/mapreduce/trunk/src/benchmarks/gridmix2/generateGridmix2data.sh
    hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/CombinerJobCreator.java
    hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GenericMRLoadJobCreator.java
    hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GridMixRunner.java
    hadoop/mapreduce/trunk/src/c++/pipes/impl/HadoopPipes.cc
    hadoop/mapreduce/trunk/src/c++/task-controller/task-controller.c
    hadoop/mapreduce/trunk/src/c++/task-controller/task-controller.h
    hadoop/mapreduce/trunk/src/c++/task-controller/tests/test-task-controller.c
    hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred/MemoryMatcher.java
    hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java
    hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacityScheduler.java
    hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacitySchedulerWithJobTracker.java
    hadoop/mapreduce/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinMapperBase.java
    hadoop/mapreduce/trunk/src/contrib/dynamic-scheduler/README
    hadoop/mapreduce/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/ConfProp.java
    hadoop/mapreduce/trunk/src/contrib/fairscheduler/designdoc/fair_scheduler_design_doc.tex
    hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java
    hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java
    hadoop/mapreduce/trunk/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestGridmixSubmission.java
    hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/main/UpdateIndex.java
    hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdateConfiguration.java
    hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdater.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestAllTables.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrApp.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrAppReduce.java
    hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/distcp.xml
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/fair_scheduler.xml
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/streaming.xml
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/BaileyBorweinPlouffe.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/Grep.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/Join.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/RandomTextWriter.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/Sort.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/dancing/DistributedPentomino.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/dancing/Pentomino.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/pi/DistSum.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraInputFormat.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraOutputFormat.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/TeraValidate.java
    hadoop/mapreduce/trunk/src/examples/org/apache/hadoop/examples/terasort/package.html
    hadoop/mapreduce/trunk/src/examples/pipes/conf/word-part.xml
    hadoop/mapreduce/trunk/src/examples/pipes/conf/word.xml
    hadoop/mapreduce/trunk/src/examples/pipes/impl/sort.cc
    hadoop/mapreduce/trunk/src/examples/pipes/impl/wordcount-nopipe.cc
    hadoop/mapreduce/trunk/src/java/mapred-default.xml
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/BackupStore.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/Child.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/CompletedJobStatusStore.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/EagerTaskInitializationListener.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/FileInputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/FileOutputCommitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/FileOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/IndexCache.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/InputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/IsolationRunner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JSPUtil.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobConf.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobEndNotifier.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobQueueTaskScheduler.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JvmManager.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/KeyValueLineRecordReader.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LimitTasksPerJobTaskScheduler.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LineRecordReader.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LinuxTaskController.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/MapTask.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/Mapper.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/Merger.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/NodeHealthCheckerService.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/Reducer.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/SequenceFileAsBinaryOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/SkipBadRecords.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/Task.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskController.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskLog.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskMemoryManagerThread.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/CombineFileRecordReader.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/KeyFieldBasedComparator.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/LazyOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/MultipleInputs.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/MultipleOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/MultithreadedMapRunner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/NLineInputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/RegexMapper.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/lib/db/DBOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/package.html
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Application.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesMapRunner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesNonJavaInputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesReducer.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/pipes/Submitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Cluster.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/InputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Job.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobContext.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobSubmitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/filecache/TrackerDistributedCacheManager.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistory.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/aggregate/UniqValueCount.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueAggregatorBaseDescriptor.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueAggregatorCombiner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueAggregatorJob.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueAggregatorJobBase.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueAggregatorReducer.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/db/DBConfiguration.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionMapper.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionReducer.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueLineRecordReader.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/LineRecordReader.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/MultipleInputs.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/NLineInputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFilter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/Parser.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/MultithreadedMapper.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/RegexMapper.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/LazyOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/SequenceFileAsBinaryOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/SequenceFileOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/BinaryPartitioner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedComparator.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedPartitioner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/TotalOrderPartitioner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/server/tasktracker/Localizer.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleClientMetrics.java
    hadoop/mapreduce/trunk/src/java/overview.html
    hadoop/mapreduce/trunk/src/test/mapred-site.xml
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/cli/TestMRCLI.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/conf/TestJobConf.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/conf/TestNoDefaultsJobConf.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/BigMapOutput.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ClusterWithLinuxTaskController.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ControlledMapReduceJob.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/GenericMRLoadGenerator.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/MiniMRCluster.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/NotificationTestCase.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ReliabilityTest.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestBadRecords.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestCompressedEmptyMapOutputs.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestControlledMapReduceJob.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestEmptyJob.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestFieldSelection.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestFileOutputCommitter.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestIndexCache.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestIsolationRunner.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobDirCleanup.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobExecutionAsDifferentUser.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgress.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgressListener.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobQueueInformation.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobRetire.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobStatusPersistency.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillCompletedJob.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLazyOutput.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLimitTasksPerJobTaskScheduler.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLocalizationWithLinuxTaskController.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLostTracker.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMRServerPorts.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMRWithDistributedCache.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapCollection.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapOutputType.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapProgress.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapRed.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapredHeartbeat.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapredSystemDir.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRChildTask.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRClasspath.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRDFSSort.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRWithDFS.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMultipleLevelCaching.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestNodeRefresh.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestQueueManager.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRecoveryManager.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetch.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceTask.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSetupAndCleanupFailure.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSetupTaskScheduling.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSeveral.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpeculativeExecution.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpilledRecordsCounter.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSubmitJob.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTTMemoryReporting.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskFail.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskLimits.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerLocalization.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTextInputFormat.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTextOutputFormat.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerBlacklistAcrossJobs.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerDistributedCacheManagerWithLinuxTaskController.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerReservation.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ThreadedMapBenchmark.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/join/TestDatamerge.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/FailJob.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/GenericMRLoadGenerator.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/SleepJob.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/Logalyzer.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/ParsedConfigFile.java

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Fri Sep 18 15:09:48 2009
@@ -39,6 +39,9 @@
     MAPREDUCE-157. Refactor job history APIs and change the history format to 
     JSON. (Jothi Padmanabhan via sharad)
 
+    MAPREDUCE-849. Rename configuration properties. (Amareshwari Sriramadasu 
+    via sharad)
+
   NEW FEATURES
 
     MAPREDUCE-706. Support for FIFO pools in the fair scheduler.

Modified: hadoop/mapreduce/trunk/conf/taskcontroller.cfg
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/conf/taskcontroller.cfg?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/conf/taskcontroller.cfg (original)
+++ hadoop/mapreduce/trunk/conf/taskcontroller.cfg Fri Sep 18 15:09:48 2009
@@ -1,2 +1,2 @@
-mapred.local.dir=#configured value of mapred.local.dir. It can be a list of comma separated paths.
+mapreduce.cluster.local.dir=#configured value of mapreduce.cluster.local.dir. It can be a list of comma separated paths.
 hadoop.log.dir=#configured value of hadoop.log.dir.
\ No newline at end of file

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/generateData.sh
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/generateData.sh?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/generateData.sh (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/generateData.sh Fri Sep 18 15:09:48 2009
@@ -29,51 +29,51 @@
 
 ${HADOOP_HOME}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
-  -D test.randomtextwrite.total_bytes=${COMPRESSED_DATA_BYTES} \
-  -D test.randomtextwrite.bytes_per_map=$((${COMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
-  -D test.randomtextwrite.min_words_key=5 \
-  -D test.randomtextwrite.max_words_key=10 \
-  -D test.randomtextwrite.min_words_value=100 \
-  -D test.randomtextwrite.max_words_value=10000 \
-  -D mapred.output.compress=true \
+  -D mapreduce.randomtextwriter.totalbytes=${COMPRESSED_DATA_BYTES} \
+  -D mapreduce.randomtextwriter.bytespermap=$((${COMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
+  -D mapreduce.randomtextwriter.minwordskey=5 \
+  -D mapreduce.randomtextwriter.maxwordskey=10 \
+  -D mapreduce.randomtextwriter.minwordsvalue=100 \
+  -D mapreduce.randomtextwriter.maxwordsvalue=10000 \
+  -D mapreduce.output.fileoutputformat.compress=true \
   -D mapred.map.output.compression.type=BLOCK \
-  -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat \
+  -outFormat org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat \
   ${VARCOMPSEQ} &
 
 ${HADOOP_HOME}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
-  -D test.randomtextwrite.total_bytes=${COMPRESSED_DATA_BYTES} \
-  -D test.randomtextwrite.bytes_per_map=$((${COMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
-  -D test.randomtextwrite.min_words_key=5 \
-  -D test.randomtextwrite.max_words_key=5 \
-  -D test.randomtextwrite.min_words_value=100 \
-  -D test.randomtextwrite.max_words_value=100 \
-  -D mapred.output.compress=true \
+  -D mapreduce.randomtextwriter.totalbytes=${COMPRESSED_DATA_BYTES} \
+  -D mapreduce.randomtextwriter.bytespermap=$((${COMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
+  -D mapreduce.randomtextwriter.minwordskey=5 \
+  -D mapreduce.randomtextwriter.maxwordskey=5 \
+  -D mapreduce.randomtextwriter.minwordsvalue=100 \
+  -D mapreduce.randomtextwriter.maxwordsvalue=100 \
+  -D mapreduce.output.fileoutputformat.compress=true \
   -D mapred.map.output.compression.type=BLOCK \
-  -outFormat org.apache.hadoop.mapred.SequenceFileOutputFormat \
+  -outFormat org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat \
   ${FIXCOMPSEQ} &
 
 ${HADOOP_HOME}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
-  -D test.randomtextwrite.total_bytes=${UNCOMPRESSED_DATA_BYTES} \
-  -D test.randomtextwrite.bytes_per_map=$((${UNCOMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
-  -D test.randomtextwrite.min_words_key=1 \
-  -D test.randomtextwrite.max_words_key=10 \
-  -D test.randomtextwrite.min_words_value=0 \
-  -D test.randomtextwrite.max_words_value=200 \
-  -D mapred.output.compress=false \
-  -outFormat org.apache.hadoop.mapred.TextOutputFormat \
+  -D mapreduce.randomtextwriter.totalbytes=${UNCOMPRESSED_DATA_BYTES} \
+  -D mapreduce.randomtextwriter.bytespermap=$((${UNCOMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
+  -D mapreduce.randomtextwriter.minwordskey=1 \
+  -D mapreduce.randomtextwriter.maxwordskey=10 \
+  -D mapreduce.randomtextwriter.minwordsvalue=0 \
+  -D mapreduce.randomtextwriter.maxwordsvalue=200 \
+  -D mapreduce.output.fileoutputformat.compress=false \
+  -outFormat org.apache.hadoop.mapreduce.lib.output.TextOutputFormat \
   ${VARINFLTEXT} &
 
 ${HADOOP_HOME}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
-  -D test.randomtextwrite.total_bytes=${INDIRECT_DATA_BYTES} \
-  -D test.randomtextwrite.bytes_per_map=$((${INDIRECT_DATA_BYTES} / ${INDIRECT_DATA_FILES})) \
-  -D test.randomtextwrite.min_words_key=5 \
-  -D test.randomtextwrite.max_words_key=5 \
-  -D test.randomtextwrite.min_words_value=20 \
-  -D test.randomtextwrite.max_words_value=20 \
-  -D mapred.output.compress=true \
+  -D mapreduce.randomtextwriter.totalbytes=${INDIRECT_DATA_BYTES} \
+  -D mapreduce.randomtextwriter.bytespermap=$((${INDIRECT_DATA_BYTES} / ${INDIRECT_DATA_FILES})) \
+  -D mapreduce.randomtextwriter.minwordskey=5 \
+  -D mapreduce.randomtextwriter.maxwordskey=5 \
+  -D mapreduce.randomtextwriter.minwordsvalue=20 \
+  -D mapreduce.randomtextwriter.maxwordsvalue=20 \
+  -D mapreduce.output.fileoutputformat.compress=true \
   -D mapred.map.output.compression.type=BLOCK \
-  -outFormat org.apache.hadoop.mapred.TextOutputFormat \
+  -outFormat org.apache.hadoop.mapreduce.lib.output.TextOutputFormat \
   ${FIXCOMPTEXT} &

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.large
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.large?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.large (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.large Fri Sep 18 15:09:48 2009
@@ -12,5 +12,5 @@
 ${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
 
 
-${HADOOP_HOME}/bin/hadoop pipes -input $INDIR -output $OUTDIR -inputformat org.apache.hadoop.mapred.KeyValueTextInputFormat -program ${GRID_MIX_PROG}/pipes-sort -reduces $NUM_OF_REDUCERS -jobconf mapred.output.key.class=org.apache.hadoop.io.Text,mapred.output.value.class=org.apache.hadoop.io.Text -writer org.apache.hadoop.mapred.TextOutputFormat
+${HADOOP_HOME}/bin/hadoop pipes -input $INDIR -output $OUTDIR -inputformat org.apache.hadoop.mapred.KeyValueTextInputFormat -program ${GRID_MIX_PROG}/pipes-sort -reduces $NUM_OF_REDUCERS -jobconf mapreduce.job.output.key.class=org.apache.hadoop.io.Text,mapreduce.job.output.value.class=org.apache.hadoop.io.Text -writer org.apache.hadoop.mapred.TextOutputFormat
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.medium
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.medium?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.medium (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.medium Fri Sep 18 15:09:48 2009
@@ -12,5 +12,5 @@
 ${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
 
 
-${HADOOP_HOME}/bin/hadoop pipes -input $INDIR -output $OUTDIR -inputformat org.apache.hadoop.mapred.KeyValueTextInputFormat -program ${GRID_MIX_PROG}/pipes-sort -reduces $NUM_OF_REDUCERS -jobconf mapred.output.key.class=org.apache.hadoop.io.Text,mapred.output.value.class=org.apache.hadoop.io.Text -writer org.apache.hadoop.mapred.TextOutputFormat
+${HADOOP_HOME}/bin/hadoop pipes -input $INDIR -output $OUTDIR -inputformat org.apache.hadoop.mapred.KeyValueTextInputFormat -program ${GRID_MIX_PROG}/pipes-sort -reduces $NUM_OF_REDUCERS -jobconf mapreduce.job.output.key.class=org.apache.hadoop.io.Text,mapreduce.job.output.value.class=org.apache.hadoop.io.Text -writer org.apache.hadoop.mapred.TextOutputFormat
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.small
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.small?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.small (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix/pipesort/text-sort.small Fri Sep 18 15:09:48 2009
@@ -12,5 +12,5 @@
 ${HADOOP_HOME}/bin/hadoop dfs -rmr $OUTDIR
 
 
-${HADOOP_HOME}/bin/hadoop pipes -input $INDIR -output $OUTDIR -inputformat org.apache.hadoop.mapred.KeyValueTextInputFormat -program ${GRID_MIX_PROG}/pipes-sort -reduces $NUM_OF_REDUCERS -jobconf mapred.output.key.class=org.apache.hadoop.io.Text,mapred.output.value.class=org.apache.hadoop.io.Text -writer org.apache.hadoop.mapred.TextOutputFormat
+${HADOOP_HOME}/bin/hadoop pipes -input $INDIR -output $OUTDIR -inputformat org.apache.hadoop.mapred.KeyValueTextInputFormat -program ${GRID_MIX_PROG}/pipes-sort -reduces $NUM_OF_REDUCERS -jobconf mapreduce.job.output.key.class=org.apache.hadoop.io.Text,mapreduce.job.output.value.class=org.apache.hadoop.io.Text -writer org.apache.hadoop.mapred.TextOutputFormat
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix2/generateGridmix2data.sh
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix2/generateGridmix2data.sh?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix2/generateGridmix2data.sh (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix2/generateGridmix2data.sh Fri Sep 18 15:09:48 2009
@@ -53,13 +53,13 @@
 
 ${HADOOP_HOME}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
-  -D test.randomtextwrite.total_bytes=${COMPRESSED_DATA_BYTES} \
-  -D test.randomtextwrite.bytes_per_map=$((${COMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
-  -D test.randomtextwrite.min_words_key=5 \
-  -D test.randomtextwrite.max_words_key=10 \
-  -D test.randomtextwrite.min_words_value=100 \
-  -D test.randomtextwrite.max_words_value=10000 \
-  -D mapred.output.compress=true \
+  -D mapreduce.randomtextwriter.totalbytes=${COMPRESSED_DATA_BYTES} \
+  -D mapreduce.randomtextwriter.bytespermap=$((${COMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
+  -D mapreduce.randomtextwriter.minwordskey=5 \
+  -D mapreduce.randomtextwriter.maxwordskey=10 \
+  -D mapreduce.randomtextwriter.minwordsvalue=100 \
+  -D mapreduce.randomtextwriter.maxwordsvalue=10000 \
+  -D mapreduce.output.fileoutputformat.compress=true \
   -D mapred.map.output.compression.type=BLOCK \
   -outFormat org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat \
   ${VARCOMPSEQ} &
@@ -67,13 +67,13 @@
 
 ${HADOOP_HOME}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
-  -D test.randomtextwrite.total_bytes=${COMPRESSED_DATA_BYTES} \
-  -D test.randomtextwrite.bytes_per_map=$((${COMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
-  -D test.randomtextwrite.min_words_key=5 \
-  -D test.randomtextwrite.max_words_key=5 \
-  -D test.randomtextwrite.min_words_value=100 \
-  -D test.randomtextwrite.max_words_value=100 \
-  -D mapred.output.compress=true \
+  -D mapreduce.randomtextwriter.totalbytes=${COMPRESSED_DATA_BYTES} \
+  -D mapreduce.randomtextwriter.bytespermap=$((${COMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
+  -D mapreduce.randomtextwriter.minwordskey=5 \
+  -D mapreduce.randomtextwriter.maxwordskey=5 \
+  -D mapreduce.randomtextwriter.minwordsvalue=100 \
+  -D mapreduce.randomtextwriter.maxwordsvalue=100 \
+  -D mapreduce.output.fileoutputformat.compress=true \
   -D mapred.map.output.compression.type=BLOCK \
   -outFormat org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat \
   ${FIXCOMPSEQ} &
@@ -81,13 +81,13 @@
 
 ${HADOOP_HOME}/bin/hadoop jar \
   ${EXAMPLE_JAR} randomtextwriter \
-  -D test.randomtextwrite.total_bytes=${UNCOMPRESSED_DATA_BYTES} \
-  -D test.randomtextwrite.bytes_per_map=$((${UNCOMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
-  -D test.randomtextwrite.min_words_key=1 \
-  -D test.randomtextwrite.max_words_key=10 \
-  -D test.randomtextwrite.min_words_value=0 \
-  -D test.randomtextwrite.max_words_value=200 \
-  -D mapred.output.compress=false \
+  -D mapreduce.randomtextwriter.totalbytes=${UNCOMPRESSED_DATA_BYTES} \
+  -D mapreduce.randomtextwriter.bytespermap=$((${UNCOMPRESSED_DATA_BYTES} / ${NUM_MAPS})) \
+  -D mapreduce.randomtextwriter.minwordskey=1 \
+  -D mapreduce.randomtextwriter.maxwordskey=10 \
+  -D mapreduce.randomtextwriter.minwordsvalue=0 \
+  -D mapreduce.randomtextwriter.maxwordsvalue=200 \
+  -D mapreduce.output.fileoutputformat.compress=false \
   -outFormat org.apache.hadoop.mapreduce.lib.output.TextOutputFormat \
   ${VARINFLTEXT} &
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/CombinerJobCreator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/CombinerJobCreator.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/CombinerJobCreator.java (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/CombinerJobCreator.java Fri Sep 18 15:09:48 2009
@@ -58,8 +58,8 @@
         return null;
       }
     }
-    conf.setBoolean("mapred.compress.map.output", mapoutputCompressed);
-    conf.setBoolean("mapred.output.compress", outputCompressed);
+    conf.setBoolean(JobContext.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
+    conf.setBoolean(FileOutputFormat.COMPRESS, outputCompressed);
 
     Job job = new Job(conf);
     job.setJobName("GridmixCombinerJob");

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GenericMRLoadJobCreator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GenericMRLoadJobCreator.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GenericMRLoadJobCreator.java (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GenericMRLoadJobCreator.java Fri Sep 18 15:09:48 2009
@@ -56,7 +56,7 @@
       // No input dir? Generate random data
       System.err.println("No input path; ignoring InputFormat");
       confRandom(job);
-    } else if (null != conf.getClass("mapred.indirect.input.format", null)) {
+    } else if (null != conf.getClass(INDIRECT_INPUT_FORMAT, null)) {
       // specified IndirectInputFormat? Build src list
       JobClient jClient = new JobClient(conf);
       Path sysdir = jClient.getSystemDir();
@@ -64,7 +64,7 @@
       Path indirInputFile = new Path(sysdir, Integer.toString(r
           .nextInt(Integer.MAX_VALUE), 36)
           + "_files");
-      conf.set("mapred.indirect.input.file", indirInputFile.toString());
+      conf.set(INDIRECT_INPUT_FILE, indirInputFile.toString());
       SequenceFile.Writer writer = SequenceFile.createWriter(sysdir
           .getFileSystem(conf), conf, indirInputFile, LongWritable.class,
           Text.class, SequenceFile.CompressionType.NONE);
@@ -92,8 +92,8 @@
       }
     }
 
-    conf.setBoolean("mapred.compress.map.output", mapoutputCompressed);
-    conf.setBoolean("mapred.output.compress", outputCompressed);
+    conf.setBoolean(JobContext.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
+    conf.setBoolean(FileOutputFormat.COMPRESS, outputCompressed);
     return job;
   }
 

Modified: hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GridMixRunner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GridMixRunner.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GridMixRunner.java (original)
+++ hadoop/mapreduce/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/GridMixRunner.java Fri Sep 18 15:09:48 2009
@@ -126,8 +126,8 @@
       clearDir(outdir);
       try {
         Configuration conf = StreamJob.createJob(args);
-        conf.setBoolean("mapred.output.compress", outputCompressed);
-        conf.setBoolean("mapred.compress.map.output", mapoutputCompressed);
+        conf.setBoolean(FileOutputFormat.COMPRESS, outputCompressed);
+        conf.setBoolean(JobContext.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
         Job job = new Job(conf, "GridmixStreamingSorter." + size);
         ControlledJob cjob = new ControlledJob(job, null);
         gridmix.addJob(cjob);
@@ -149,8 +149,8 @@
 
       try {
         Configuration conf = new Configuration();
-        conf.setBoolean("mapred.output.compress", outputCompressed);
-        conf.setBoolean("mapred.compress.map.output", mapoutputCompressed);
+        conf.setBoolean(FileOutputFormat.COMPRESS, outputCompressed);
+        conf.setBoolean(JobContext.MAP_OUTPUT_COMPRESS, mapoutputCompressed);
         Job job = new Job(conf);
         job.setJarByClass(Sort.class);
         job.setJobName("GridmixJavaSorter." + size);

Modified: hadoop/mapreduce/trunk/src/c++/pipes/impl/HadoopPipes.cc
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/c%2B%2B/pipes/impl/HadoopPipes.cc?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/c++/pipes/impl/HadoopPipes.cc (original)
+++ hadoop/mapreduce/trunk/src/c++/pipes/impl/HadoopPipes.cc Fri Sep 18 15:09:48 2009
@@ -701,8 +701,8 @@
       }
       if (reducer != NULL) {
         int64_t spillSize = 100;
-        if (jobConf->hasKey("io.sort.mb")) {
-          spillSize = jobConf->getInt("io.sort.mb");
+        if (jobConf->hasKey("mapreduce.task.io.sort.mb")) {
+          spillSize = jobConf->getInt("mapreduce.task.io.sort.mb");
         }
         writer = new CombineRunner(spillSize * 1024 * 1024, this, reducer, 
                                    uplink, partitioner, numReduces);
@@ -937,7 +937,7 @@
    */
   void* ping(void* ptr) {
     TaskContextImpl* context = (TaskContextImpl*) ptr;
-    char* portStr = getenv("hadoop.pipes.command.port");
+    char* portStr = getenv("mapreduce.pipes.command.port");
     int MAX_RETRIES = 3;
     int remaining_retries = MAX_RETRIES;
     while (!context->isDone()) {
@@ -990,7 +990,7 @@
     try {
       TaskContextImpl* context = new TaskContextImpl(factory);
       Protocol* connection;
-      char* portStr = getenv("hadoop.pipes.command.port");
+      char* portStr = getenv("mapreduce.pipes.command.port");
       int sock = -1;
       FILE* stream = NULL;
       FILE* outStream = NULL;
@@ -1024,8 +1024,8 @@
                                      + strerror(errno));
 
         connection = new BinaryProtocol(stream, context, outStream);
-      } else if (getenv("hadoop.pipes.command.file")) {
-        char* filename = getenv("hadoop.pipes.command.file");
+      } else if (getenv("mapreduce.pipes.commandfile")) {
+        char* filename = getenv("mapreduce.pipes.commandfile");
         string outFilename = filename;
         outFilename += ".out";
         stream = fopen(filename, "r");

Modified: hadoop/mapreduce/trunk/src/c++/task-controller/task-controller.c
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/c%2B%2B/task-controller/task-controller.c?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/c++/task-controller/task-controller.c (original)
+++ hadoop/mapreduce/trunk/src/c++/task-controller/task-controller.c Fri Sep 18 15:09:48 2009
@@ -206,7 +206,7 @@
 }
 
 /**
- * Function to check if the passed tt_root is present in mapred.local.dir
+ * Function to check if the passed tt_root is present in mapreduce.cluster.local.dir
  * the task-controller is configured with.
  */
 int check_tt_root(const char *tt_root) {
@@ -442,7 +442,7 @@
       break;
     }
 
-    // prepare attempt-dir in each of the mapred_local_dir
+    // prepare attempt-dir in each of the mapreduce.cluster.local.dir
     attempt_dir = get_attempt_directory(job_dir, attempt_id);
     if (attempt_dir == NULL) {
       fprintf(LOGFILE, "Couldn't get attempt directory for %s.\n", attempt_id);
@@ -911,7 +911,7 @@
 
 /*
  * Function used to launch a task as the provided user. It does the following :
- * 1) Checks if the tt_root passed is found in mapred.local.dir
+ * 1) Checks if the tt_root passed is found in mapreduce.cluster.local.dir
  * 2) Prepares attempt_dir and log_dir to be accessible by the child
  * 3) Uses get_task_launcher_file to fetch the task script file path
  * 4) Does an execlp on the same in order to replace the current image with

Modified: hadoop/mapreduce/trunk/src/c++/task-controller/task-controller.h
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/c%2B%2B/task-controller/task-controller.h?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/c++/task-controller/task-controller.h (original)
+++ hadoop/mapreduce/trunk/src/c++/task-controller/task-controller.h Fri Sep 18 15:09:48 2009
@@ -83,7 +83,7 @@
 
 #define TASK_SCRIPT_PATTERN "%s/%s/taskjvm.sh"
 
-#define TT_SYS_DIR_KEY "mapred.local.dir"
+#define TT_SYS_DIR_KEY "mapreduce.cluster.local.dir"
 
 #define TT_LOG_DIR_KEY "hadoop.log.dir"
 

Modified: hadoop/mapreduce/trunk/src/c++/task-controller/tests/test-task-controller.c
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/c%2B%2B/task-controller/tests/test-task-controller.c?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/c++/task-controller/tests/test-task-controller.c (original)
+++ hadoop/mapreduce/trunk/src/c++/task-controller/tests/test-task-controller.c Fri Sep 18 15:09:48 2009
@@ -22,7 +22,7 @@
 int write_config_file(char *file_name) {
   FILE *file;
   char const *str =
-      "mapred.local.dir=/tmp/testing1,/tmp/testing2,/tmp/testing3,/tmp/testing4\n";
+      "mapreduce.cluster.local.dir=/tmp/testing1,/tmp/testing2,/tmp/testing3,/tmp/testing4\n";
 
   file = fopen(file_name, "w");
   if (file == NULL) {
@@ -67,7 +67,7 @@
   // Test obtaining a value for a key from the config
   char *config_values[4] = { "/tmp/testing1", "/tmp/testing2",
       "/tmp/testing3", "/tmp/testing4" };
-  char *value = (char *) get_value("mapred.local.dir");
+  char *value = (char *) get_value("mapreduce.cluster.local.dir");
   if (strcmp(value, "/tmp/testing1,/tmp/testing2,/tmp/testing3,/tmp/testing4")
       != 0) {
     printf("Obtaining a value for a key from the config failed.\n");
@@ -75,7 +75,7 @@
   }
 
   // Test the parsing of a multiple valued key from the config
-  char **values = (char **) get_values("mapred.local.dir");
+  char **values = (char **)get_values("mapreduce.cluster.local.dir");
   char **values_ptr = values;
   int i = 0;
   while (*values_ptr != NULL) {
@@ -87,12 +87,12 @@
     values_ptr++;
   }
 
-  if (check_variable_against_config("mapred.local.dir", "/tmp/testing5") == 0) {
+  if (check_variable_against_config("mapreduce.cluster.local.dir", "/tmp/testing5") == 0) {
     printf("Configuration should not contain /tmp/testing5! \n");
     goto cleanup;
   }
 
-  if (check_variable_against_config("mapred.local.dir", "/tmp/testing4") != 0) {
+  if (check_variable_against_config("mapreduce.cluster.local.dir", "/tmp/testing4") != 0) {
     printf("Configuration should contain /tmp/testing4! \n");
     goto cleanup;
   }

Modified: hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred/MemoryMatcher.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred/MemoryMatcher.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred/MemoryMatcher.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred/MemoryMatcher.java Fri Sep 18 15:09:48 2009
@@ -20,7 +20,9 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.conf.Configuration;
 
 class MemoryMatcher {
@@ -166,11 +168,10 @@
 
     memSizeForMapSlotOnJT =
         JobConf.normalizeMemoryConfigValue(conf.getLong(
-            JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY,
-            JobConf.DISABLED_MEMORY_LIMIT));
+            MRConfig.MAPMEMORY_MB, JobConf.DISABLED_MEMORY_LIMIT));
     memSizeForReduceSlotOnJT =
         JobConf.normalizeMemoryConfigValue(conf.getLong(
-            JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY,
+            MRConfig.REDUCEMEMORY_MB,
             JobConf.DISABLED_MEMORY_LIMIT));
 
     //handling @deprecated values
@@ -178,8 +179,8 @@
       LOG.warn(
         JobConf.deprecatedString(
           JobConf.UPPER_LIMIT_ON_TASK_VMEM_PROPERTY)+
-          " instead use " +JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY+
-          " and " + JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY
+          " instead use " + JTConfig.JT_MAX_MAPMEMORY_MB +
+          " and " + JTConfig.JT_MAX_REDUCEMEMORY_MB
       );
 
       limitMaxMemForMapTasks = limitMaxMemForReduceTasks =
@@ -197,13 +198,11 @@
       limitMaxMemForMapTasks =
         JobConf.normalizeMemoryConfigValue(
           conf.getLong(
-            JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY,
-            JobConf.DISABLED_MEMORY_LIMIT));
+            JTConfig.JT_MAX_MAPMEMORY_MB, JobConf.DISABLED_MEMORY_LIMIT));
       limitMaxMemForReduceTasks =
         JobConf.normalizeMemoryConfigValue(
           conf.getLong(
-            JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY,
-            JobConf.DISABLED_MEMORY_LIMIT));
+            JTConfig.JT_MAX_REDUCEMEMORY_MB, JobConf.DISABLED_MEMORY_LIMIT));
     }
     LOG.info(String.format("Scheduler configured with "
         + "(memSizeForMapSlotOnJT, memSizeForReduceSlotOnJT, "

Modified: hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ClusterWithCapacityScheduler.java Fri Sep 18 15:09:48 2009
@@ -35,6 +35,7 @@
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 /**
  * A test-cluster based on {@link MiniMRCluster} that is started with
@@ -102,7 +103,7 @@
       setUpSchedulerConfigFile(schedulerProperties);
     }
 
-    clusterConf.set("mapred.jobtracker.taskScheduler",
+    clusterConf.set(JTConfig.JT_TASK_SCHEDULER,
         CapacityTaskScheduler.class.getName());
     mrCluster =
         new MiniMRCluster(numTaskTrackers, "file:///", 1, null, null,

Modified: hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacityScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacityScheduler.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacityScheduler.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacityScheduler.java Fri Sep 18 15:09:48 2009
@@ -22,7 +22,9 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.JobStatusChangeEvent.EventType;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
 import static org.apache.hadoop.mapred.CapacityTestUtils.*;
 
@@ -627,16 +629,10 @@
     scheduler.setTaskTrackerManager(taskTrackerManager);
     // enabled memory-based scheduling
     // Normal job in the cluster would be 1GB maps/reduces
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY,
-      2 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY, 1 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY,
-      1 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY, 1 * 1024);
+    scheduler.getConf().setLong(JTConfig.JT_MAX_MAPMEMORY_MB, 2 * 1024);
+    scheduler.getConf().setLong(MRConfig.MAPMEMORY_MB, 1 * 1024);
+    scheduler.getConf().setLong(JTConfig.JT_MAX_REDUCEMEMORY_MB, 1 * 1024);
+    scheduler.getConf().setLong(MRConfig.REDUCEMEMORY_MB, 1 * 1024);
     scheduler.setResourceManagerConf(resConf);
     scheduler.start();
     scheduler.getRoot().getChildren().get(0).getQueueSchedulingContext()
@@ -1004,13 +1000,13 @@
     // enabled memory-based scheduling
     // Normal job in the cluster would be 1GB maps/reduces
     scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY, 2 * 1024);
+        JTConfig.JT_MAX_MAPMEMORY_MB, 2 * 1024);
     scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY, 1 * 1024);
+        MRConfig.MAPMEMORY_MB, 1 * 1024);
     scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY, 2 * 1024);
+        JTConfig.JT_MAX_REDUCEMEMORY_MB, 2 * 1024);
     scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY, 1 * 1024);
+        MRConfig.REDUCEMEMORY_MB, 1 * 1024);
     scheduler.setResourceManagerConf(resConf);
     scheduler.start();
 
@@ -1425,16 +1421,10 @@
     scheduler.setTaskTrackerManager(taskTrackerManager);
     // enabled memory-based scheduling
     // Normal job in the cluster would be 1GB maps/reduces
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY,
-      2 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY, 1 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY,
-      1 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY, 1 * 1024);
+    scheduler.getConf().setLong(JTConfig.JT_MAX_MAPMEMORY_MB, 2 * 1024);
+    scheduler.getConf().setLong(MRConfig.MAPMEMORY_MB, 1 * 1024);
+    scheduler.getConf().setLong(JTConfig.JT_MAX_REDUCEMEMORY_MB, 1 * 1024);
+    scheduler.getConf().setLong(MRConfig.REDUCEMEMORY_MB, 1 * 1024);
     scheduler.setResourceManagerConf(resConf);
     scheduler.start();
 
@@ -1507,16 +1497,10 @@
     scheduler.setTaskTrackerManager(taskTrackerManager);
     // enabled memory-based scheduling
     // Normal jobs 1GB maps/reduces. 2GB limit on maps/reduces
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY,
-      2 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY, 1 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY,
-      2 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY, 1 * 1024);
+    scheduler.getConf().setLong(JTConfig.JT_MAX_MAPMEMORY_MB, 2 * 1024);
+    scheduler.getConf().setLong(MRConfig.MAPMEMORY_MB, 1 * 1024);
+    scheduler.getConf().setLong(JTConfig.JT_MAX_REDUCEMEMORY_MB, 2 * 1024);
+    scheduler.getConf().setLong(MRConfig.REDUCEMEMORY_MB, 1 * 1024);
     scheduler.setResourceManagerConf(resConf);
     scheduler.start();
 
@@ -1658,16 +1642,10 @@
     scheduler.setTaskTrackerManager(taskTrackerManager);
     // enabled memory-based scheduling
     LOG.debug("Assume TT has 2GB for maps and 2GB for reduces");
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY,
-      2 * 1024L);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY, 512);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY,
-      2 * 1024L);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY, 512);
+    scheduler.getConf().setLong(JTConfig.JT_MAX_MAPMEMORY_MB, 2 * 1024L);
+    scheduler.getConf().setLong(MRConfig.MAPMEMORY_MB, 512);
+    scheduler.getConf().setLong(JTConfig.JT_MAX_REDUCEMEMORY_MB, 2 * 1024L);
+    scheduler.getConf().setLong(MRConfig.REDUCEMEMORY_MB, 512);
     scheduler.setResourceManagerConf(resConf);
     scheduler.start();
 
@@ -2184,14 +2162,10 @@
     scheduler.setTaskTrackerManager(taskTrackerManager);
     // enabled memory-based scheduling
     // Normal job in the cluster would be 1GB maps/reduces
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY, 2 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY, 1 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY, 1 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY, 1 * 1024);
+    scheduler.getConf().setLong(JTConfig.JT_MAX_MAPMEMORY_MB, 2 * 1024);
+    scheduler.getConf().setLong(MRConfig.MAPMEMORY_MB, 1 * 1024);
+    scheduler.getConf().setLong(JTConfig.JT_MAX_REDUCEMEMORY_MB, 1 * 1024);
+    scheduler.getConf().setLong(MRConfig.REDUCEMEMORY_MB, 1 * 1024);
     scheduler.setResourceManagerConf(resConf);
     scheduler.start();
 
@@ -2321,14 +2295,10 @@
     );
     // enabled memory-based scheduling
     // Normal job in the cluster would be 1GB maps/reduces
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY, 2 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY, 1 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY, 1 * 1024);
-    scheduler.getConf().setLong(
-      JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY, 1 * 1024);
+    scheduler.getConf().setLong(JTConfig.JT_MAX_MAPMEMORY_MB, 2 * 1024);
+    scheduler.getConf().setLong(MRConfig.MAPMEMORY_MB, 1 * 1024);
+    scheduler.getConf().setLong(JTConfig.JT_MAX_REDUCEMEMORY_MB, 1 * 1024);
+    scheduler.getConf().setLong(MRConfig.REDUCEMEMORY_MB, 1 * 1024);
     scheduler.setResourceManagerConf(resConf);
     scheduler.start();
 

Modified: hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacitySchedulerWithJobTracker.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacitySchedulerWithJobTracker.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacitySchedulerWithJobTracker.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacitySchedulerWithJobTracker.java Fri Sep 18 15:09:48 2009
@@ -21,6 +21,9 @@
 import java.util.Properties;
 
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 import org.apache.hadoop.mapreduce.SleepJob;
 
 public class TestCapacitySchedulerWithJobTracker extends
@@ -37,11 +40,9 @@
     Properties schedulerProps = new Properties();
     Properties clusterProps = new Properties();
     clusterProps.put("mapred.queue.names","default");
-    clusterProps.put("mapred.tasktracker.map.tasks.maximum", String.valueOf(1));
-    clusterProps.put(
-      "mapred.tasktracker.reduce.tasks.maximum", String
-        .valueOf(1));
-    clusterProps.put("mapred.jobtracker.maxtasks.per.job", String.valueOf(1));
+    clusterProps.put(TTConfig.TT_MAP_SLOTS, String.valueOf(1));
+    clusterProps.put(TTConfig.TT_REDUCE_SLOTS, String.valueOf(1));
+    clusterProps.put(JTConfig.JT_TASKS_PER_JOB, String.valueOf(1));
     // cluster capacity 1 maps, 1 reduces
     startCluster(1, clusterProps, schedulerProps);
     CapacityTaskScheduler scheduler = (CapacityTaskScheduler) getJobTracker()
@@ -83,10 +84,8 @@
     Job jobs[] = new Job[2];
 
     Properties clusterProps = new Properties();
-    clusterProps.put("mapred.tasktracker.map.tasks.maximum", String.valueOf(2));
-    clusterProps.put(
-      "mapred.tasktracker.reduce.tasks.maximum", String
-        .valueOf(2));
+    clusterProps.put(TTConfig.TT_MAP_SLOTS, String.valueOf(2));
+    clusterProps.put(TTConfig.TT_REDUCE_SLOTS, String.valueOf(2));
     clusterProps.put("mapred.queue.names", queues[0] + "," + queues[1]);
     startCluster(2, clusterProps, schedulerProps);
     CapacityTaskScheduler scheduler = (CapacityTaskScheduler) getJobTracker()
@@ -108,7 +107,7 @@
 
     JobConf conf = getJobConf();
     conf.setSpeculativeExecution(false);
-    conf.set("mapred.committer.job.setup.cleanup.needed", "false");
+    conf.set(JobContext.SETUP_CLEANUP_NEEDED, "false");
     conf.setNumTasksToExecutePerJvm(-1);
     conf.setQueueName(queues[0]);
     SleepJob sleepJob1 = new SleepJob();

Modified: hadoop/mapreduce/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinMapperBase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinMapperBase.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinMapperBase.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinMapperBase.java Fri Sep 18 15:09:48 2009
@@ -25,6 +25,7 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapreduce.JobContext;
 
 /**
  * This abstract class serves as the base class for the mapper class of a data
@@ -55,7 +56,7 @@
   public void configure(JobConf job) {
     super.configure(job);
     this.job = job;
-    this.inputFile = job.get("map.input.file");
+    this.inputFile = job.get(JobContext.MAP_INPUT_FILE);
     this.inputTag = generateInputTag(this.inputFile);
   }
 

Modified: hadoop/mapreduce/trunk/src/contrib/dynamic-scheduler/README
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/dynamic-scheduler/README?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/dynamic-scheduler/README (original)
+++ hadoop/mapreduce/trunk/src/contrib/dynamic-scheduler/README Fri Sep 18 15:09:48 2009
@@ -33,7 +33,7 @@
 enforces the queue shares in the form of map and reduce slots of running jobs.
 
 Hadoop Configuration (e.g. hadoop-site.xml):
-mapred.jobtracker.taskScheduler      
+mapreduce.jobtracker.taskscheduler      
     This needs to be set to 
     org.apache.hadoop.mapred.DynamicPriorityScheduler
     to use the dynamic scheduler.
@@ -97,7 +97,8 @@
 For the servlet operations query path is everything that comes after /scheduler?
 in the url. For job submission the query path is just the empty string "".
 Job submissions also need to set the following job properties:
--Dmapred.job.timestamp=<ms epoch time> -Dmapred.job.signature=<signature as above> -Dmapred.job.queue.name=<queue>
+-Dmapred.job.timestamp=<ms epoch time> 
+-Dmapred.job.signature=<signature as above> -Dmapreduce.job.queue.name=<queue>
 Note queue must match the user submitting the job.
 
 Example python query 
@@ -161,6 +162,6 @@
   params = ""
 timestamp = long(time.time()*1000)
 params = params + "&user=%s&timestamp=%d" % (USER,timestamp)
-print "-Dmapred.job.timestamp=%d -Dmapred.job.signature=%s -Dmapred.job.queue.name=%s" % (timestamp, hmac_sha1(params, KEY), USER)
+print "-Dmapred.job.timestamp=%d -Dmapred.job.signature=%s -Dmapreduce.job.queue.name=%s" % (timestamp, hmac_sha1(params, KEY), USER)
 
 

Modified: hadoop/mapreduce/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/ConfProp.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/ConfProp.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/ConfProp.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/server/ConfProp.java Fri Sep 18 15:09:48 2009
@@ -84,7 +84,7 @@
    * Property name for naming the job tracker (URI). This property is related
    * to {@link #PI_MASTER_HOST_NAME}
    */
-  JOB_TRACKER_URI(false, "mapred.job.tracker", "localhost:50020"),
+  JOB_TRACKER_URI(false, "mapreduce.jobtracker.address", "localhost:50020"),
 
   /**
    * Property name for naming the default file system (URI).

Modified: hadoop/mapreduce/trunk/src/contrib/fairscheduler/designdoc/fair_scheduler_design_doc.tex
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/fairscheduler/designdoc/fair_scheduler_design_doc.tex?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/fairscheduler/designdoc/fair_scheduler_design_doc.tex (original)
+++ hadoop/mapreduce/trunk/src/contrib/fairscheduler/designdoc/fair_scheduler_design_doc.tex Fri Sep 18 15:09:48 2009
@@ -45,9 +45,9 @@
 
 \subsection{Pools}
 
-The Fair Scheduler groups jobs into ``pools" and performs fair sharing between these pools. Each pool can use either FIFO or fair sharing to schedule jobs internal to the pool. The pool that a job is placed in is determined by a JobConf property, the ``pool name property". By default, this is {\tt user.name}, so that there is one pool per user. However, different properties can be used, e.g.~{\tt group.name} to have one pool per Unix group.
+The Fair Scheduler groups jobs into ``pools" and performs fair sharing between these pools. Each pool can use either FIFO or fair sharing to schedule jobs internal to the pool. The pool that a job is placed in is determined by a JobConf property, the ``pool name property". By default, this is {\tt mapreduce.job.user.name}, so that there is one pool per user. However, different properties can be used, e.g.~{\tt group.name} to have one pool per Unix group.
 
-A common trick is to set the pool name property to an unused property name such as {\tt pool.name} and make this default to {\tt user.name}, so that there is one pool per user but it is also possible to place jobs into ``special" pools by setting their {\tt pool.name} directly. The {\tt mapred-site.xml} snippet below shows how to do this:
+A common trick is to set the pool name property to an unused property name such as {\tt pool.name} and make this default to {\tt mapreduce.job.user.name}, so that there is one pool per user but it is also possible to place jobs into ``special" pools by setting their {\tt pool.name} directly. The {\tt mapred-site.xml} snippet below shows how to do this:
 
 \begin{verbatim}
 <property>
@@ -57,7 +57,7 @@
 
 <property>
   <name>pool.name</name>
-  <value>${user.name}</value>
+  <value>${mapreduce.job.user.name}</value>
 </property>
 \end{verbatim}
 

Modified: hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java Fri Sep 18 15:09:48 2009
@@ -99,7 +99,7 @@
                             // used) or a String to specify an absolute path (if
                             // mapred.fairscheduler.allocation.file is used).
   private String poolNameProperty; // Jobconf property to use for determining a
-                                   // job's pool name (default: user.name)
+                                   // job's pool name (default: mapreduce.job.user.name)
   
   private Map<String, Pool> pools = new HashMap<String, Pool>();
   
@@ -115,7 +115,7 @@
       AllocationConfigurationException, ParserConfigurationException {
     Configuration conf = scheduler.getConf();
     this.poolNameProperty = conf.get(
-        "mapred.fairscheduler.poolnameproperty", "user.name");
+        "mapred.fairscheduler.poolnameproperty", JobContext.USER_NAME);
     this.allocFile = conf.get("mapred.fairscheduler.allocation.file");
     if (allocFile == null) {
       // No allocation file specified in jobconf. Use the default allocation

Modified: hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java Fri Sep 18 15:09:48 2009
@@ -1390,19 +1390,19 @@
     // Submit jobs, advancing time in-between to make sure that they are
     // all submitted at distinct times.
     JobInProgress job1 = submitJob(JobStatus.RUNNING, 10, 10);
-    job1.getJobConf().set("user.name", "user1");
+    job1.getJobConf().set(JobContext.USER_NAME, "user1");
     JobInfo info1 = scheduler.infos.get(job1);
     advanceTime(10);
     JobInProgress job2 = submitJob(JobStatus.RUNNING, 10, 10);
-    job2.getJobConf().set("user.name", "user1");
+    job2.getJobConf().set(JobContext.USER_NAME, "user1");
     JobInfo info2 = scheduler.infos.get(job2);
     advanceTime(10);
     JobInProgress job3 = submitJob(JobStatus.RUNNING, 10, 10);
-    job3.getJobConf().set("user.name", "user2");
+    job3.getJobConf().set(JobContext.USER_NAME, "user2");
     JobInfo info3 = scheduler.infos.get(job3);
     advanceTime(10);
     JobInProgress job4 = submitJob(JobStatus.RUNNING, 10, 10);
-    job4.getJobConf().set("user.name", "user2");
+    job4.getJobConf().set(JobContext.USER_NAME, "user2");
     JobInfo info4 = scheduler.infos.get(job4);
     
     // Check scheduler variables
@@ -1456,49 +1456,49 @@
     
     // Two jobs for user1; only one should get to run
     JobInProgress job1 = submitJob(JobStatus.RUNNING, 10, 10);
-    job1.getJobConf().set("user.name", "user1");
+    job1.getJobConf().set(JobContext.USER_NAME, "user1");
     JobInfo info1 = scheduler.infos.get(job1);
     advanceTime(10);
     JobInProgress job2 = submitJob(JobStatus.RUNNING, 10, 10);
-    job2.getJobConf().set("user.name", "user1");
+    job2.getJobConf().set(JobContext.USER_NAME, "user1");
     JobInfo info2 = scheduler.infos.get(job2);
     advanceTime(10);
     
     // Three jobs for user2; all should get to run
     JobInProgress job3 = submitJob(JobStatus.RUNNING, 10, 10);
-    job3.getJobConf().set("user.name", "user2");
+    job3.getJobConf().set(JobContext.USER_NAME, "user2");
     JobInfo info3 = scheduler.infos.get(job3);
     advanceTime(10);
     JobInProgress job4 = submitJob(JobStatus.RUNNING, 10, 10);
-    job4.getJobConf().set("user.name", "user2");
+    job4.getJobConf().set(JobContext.USER_NAME, "user2");
     JobInfo info4 = scheduler.infos.get(job4);
     advanceTime(10);
     JobInProgress job5 = submitJob(JobStatus.RUNNING, 10, 10);
-    job5.getJobConf().set("user.name", "user2");
+    job5.getJobConf().set(JobContext.USER_NAME, "user2");
     JobInfo info5 = scheduler.infos.get(job5);
     advanceTime(10);
     
     // Three jobs for user3; only two should get to run
     JobInProgress job6 = submitJob(JobStatus.RUNNING, 10, 10);
-    job6.getJobConf().set("user.name", "user3");
+    job6.getJobConf().set(JobContext.USER_NAME, "user3");
     JobInfo info6 = scheduler.infos.get(job6);
     advanceTime(10);
     JobInProgress job7 = submitJob(JobStatus.RUNNING, 10, 10);
-    job7.getJobConf().set("user.name", "user3");
+    job7.getJobConf().set(JobContext.USER_NAME, "user3");
     JobInfo info7 = scheduler.infos.get(job7);
     advanceTime(10);
     JobInProgress job8 = submitJob(JobStatus.RUNNING, 10, 10);
-    job8.getJobConf().set("user.name", "user3");
+    job8.getJobConf().set(JobContext.USER_NAME, "user3");
     JobInfo info8 = scheduler.infos.get(job8);
     advanceTime(10);
     
     // Two jobs for user4, in poolA; only one should get to run
     JobInProgress job9 = submitJob(JobStatus.RUNNING, 10, 10, "poolA");
-    job9.getJobConf().set("user.name", "user4");
+    job9.getJobConf().set(JobContext.USER_NAME, "user4");
     JobInfo info9 = scheduler.infos.get(job9);
     advanceTime(10);
     JobInProgress job10 = submitJob(JobStatus.RUNNING, 10, 10, "poolA");
-    job10.getJobConf().set("user.name", "user4");
+    job10.getJobConf().set(JobContext.USER_NAME, "user4");
     JobInfo info10 = scheduler.infos.get(job10);
     advanceTime(10);
     

Modified: hadoop/mapreduce/trunk/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestGridmixSubmission.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestGridmixSubmission.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestGridmixSubmission.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/TestGridmixSubmission.java Fri Sep 18 15:09:48 2009
@@ -32,6 +32,7 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.tools.rumen.JobStory;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
@@ -62,7 +63,7 @@
   @BeforeClass
   public static void initCluster() throws IOException {
     Configuration conf = new Configuration();
-    conf.setBoolean("mapred.job.tracker.retire.jobs", false);
+    conf.setBoolean(JTConfig.JT_RETIREJOBS, false);
     dfsCluster = new MiniDFSCluster(conf, 3, true, null);
     dfs = dfsCluster.getFileSystem();
     mrCluster = new MiniMRCluster(3, dfs.getUri().toString(), 1);

Modified: hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/main/UpdateIndex.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/main/UpdateIndex.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/main/UpdateIndex.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/main/UpdateIndex.java Fri Sep 18 15:09:48 2009
@@ -204,7 +204,8 @@
     IndexUpdateConfiguration iconf = new IndexUpdateConfiguration(jobConf);
 
     if (inputPathsString != null) {
-      jobConf.set("mapred.input.dir", inputPathsString);
+      jobConf.set(org.apache.hadoop.mapreduce.lib.input.
+        FileInputFormat.INPUT_DIR, inputPathsString);
     }
     inputPaths = FileInputFormat.getInputPaths(jobConf);
     if (inputPaths.length == 0) {

Modified: hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdateConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdateConfiguration.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdateConfiguration.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdateConfiguration.java Fri Sep 18 15:09:48 2009
@@ -23,6 +23,8 @@
 import org.apache.hadoop.contrib.index.example.LineDocInputFormat;
 import org.apache.hadoop.contrib.index.example.LineDocLocalAnalysis;
 import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 
@@ -62,7 +64,7 @@
    * @return the IO sort space in MB
    */
   public int getIOSortMB() {
-    return conf.getInt("io.sort.mb", 100);
+    return conf.getInt(JobContext.IO_SORT_MB, 100);
   }
 
   /**
@@ -70,7 +72,7 @@
    * @param mb  the IO sort space in MB
    */
   public void setIOSortMB(int mb) {
-    conf.setInt("io.sort.mb", mb);
+    conf.setInt(JobContext.IO_SORT_MB, mb);
   }
 
   /**
@@ -78,7 +80,7 @@
    * @return the Map/Reduce temp directory
    */
   public String getMapredTempDir() {
-    return conf.get("mapred.temp.dir");
+    return conf.get(MRConfig.TEMP_DIR);
   }
 
   //

Modified: hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdater.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdater.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdater.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/mapred/IndexUpdater.java Fri Sep 18 15:09:48 2009
@@ -64,9 +64,9 @@
     IndexUpdateConfiguration iconf = new IndexUpdateConfiguration(conf);
     Shard.setIndexShards(iconf, shards);
 
-    // MapTask.MapOutputBuffer uses "io.sort.mb" to decide its max buffer size
-    // (max buffer size = 1/2 * "io.sort.mb").
-    // Here we half-en "io.sort.mb" because we use the other half memory to
+    // MapTask.MapOutputBuffer uses JobContext.IO_SORT_MB to decide its max buffer size
+    // (max buffer size = 1/2 * JobContext.IO_SORT_MB).
+    // Here we halve JobContext.IO_SORT_MB because we use the other half of the memory to
     // build an intermediate form/index in Combiner.
     iconf.setIOSortMB(iconf.getIOSortMB() / 2);
 
@@ -93,10 +93,10 @@
       buffer.append(inputs[i].toString());
     }
     LOG.info("mapred.input.dir = " + buffer.toString());
-    LOG.info("mapred.output.dir = " + 
+    LOG.info("mapreduce.output.fileoutputformat.outputdir = " + 
              FileOutputFormat.getOutputPath(jobConf).toString());
-    LOG.info("mapred.map.tasks = " + jobConf.getNumMapTasks());
-    LOG.info("mapred.reduce.tasks = " + jobConf.getNumReduceTasks());
+    LOG.info("mapreduce.job.maps = " + jobConf.getNumMapTasks());
+    LOG.info("mapreduce.job.reduces = " + jobConf.getNumReduceTasks());
     LOG.info(shards.length + " shards = " + iconf.getIndexShards());
     // better if we don't create the input format instance
     LOG.info("mapred.input.format.class = "

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java Fri Sep 18 15:09:48 2009
@@ -37,6 +37,8 @@
 import org.apache.hadoop.mapred.lib.db.DBConfiguration;
 import org.apache.hadoop.mapred.lib.db.DBInputFormat;
 import org.apache.hadoop.mapred.lib.db.DBWritable;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 import org.apache.hadoop.sqoop.ConnFactory;
 import org.apache.hadoop.sqoop.ImportOptions;
@@ -73,7 +75,7 @@
 
     String tableClassName = new TableClassName(options).getClassForTable(tableName);
 
-    boolean isLocal = "local".equals(conf.get("mapred.job.tracker"));
+    boolean isLocal = "local".equals(conf.get(JTConfig.JT_IPC_ADDRESS));
     ClassLoader prevClassLoader = null;
     if (isLocal) {
       // If we're using the LocalJobRunner, then instead of using the compiled jar file
@@ -106,7 +108,7 @@
         job.setOutputFormat(SequenceFileOutputFormat.class);
         SequenceFileOutputFormat.setCompressOutput(job, true);
         SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);
-        job.set("mapred.output.value.class", tableClassName);
+        job.set(JobContext.OUTPUT_VALUE_CLASS, tableClassName);
       } else {
         LOG.warn("Unknown file layout specified: " + options.getFileLayout() + "; using text.");
       }

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestAllTables.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestAllTables.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestAllTables.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestAllTables.java Fri Sep 18 15:09:48 2009
@@ -48,9 +48,9 @@
 
     if (includeHadoopFlags) {
       args.add("-D");
-      args.add("mapred.job.tracker=local");
+      args.add("mapreduce.jobtracker.address=local");
       args.add("-D");
-      args.add("mapred.map.tasks=1");
+      args.add("mapreduce.job.maps=1");
       args.add("-D");
       args.add("fs.default.name=file:///");
     }

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java Fri Sep 18 15:09:48 2009
@@ -55,9 +55,9 @@
 
     if (includeHadoopFlags) {
       args.add("-D");
-      args.add("mapred.job.tracker=local");
+      args.add("mapreduce.jobtracker.address=local");
       args.add("-D");
-      args.add("mapred.map.tasks=1");
+      args.add("mapreduce.job.maps=1");
       args.add("-D");
       args.add("fs.default.name=file:///");
     }



Mime
View raw message