hadoop-mapreduce-commits mailing list archives

From: omal...@apache.org
Subject: svn commit: r939849 [3/3] - in /hadoop/mapreduce/trunk: ./ src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/ src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/jo...
Date: Fri, 30 Apr 2010 22:26:21 GMT
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMapCollection.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMapCollection.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMapCollection.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMapCollection.java Fri Apr 30 22:26:19 2010
@@ -300,8 +300,8 @@ public class TestMapCollection {
     conf.setInt(Job.COMPLETION_POLL_INTERVAL_KEY, 100);
     Job job = Job.getInstance(new Cluster(conf), conf);
     conf = job.getConfiguration();
-    conf.setInt(JobContext.IO_SORT_MB, ioSortMB);
-    conf.set(JobContext.MAP_SORT_SPILL_PERCENT, Float.toString(spillPer));
+    conf.setInt(MRJobConfig.IO_SORT_MB, ioSortMB);
+    conf.set(MRJobConfig.MAP_SORT_SPILL_PERCENT, Float.toString(spillPer));
     conf.setClass("test.mapcollection.class", FixedRecordFactory.class,
         RecordFactory.class);
     FixedRecordFactory.setLengths(conf, keylen, vallen);
@@ -311,7 +311,7 @@ public class TestMapCollection {
 
   private static void runTest(String name, Job job) throws Exception {
     job.setNumReduceTasks(1);
-    job.getConfiguration().setInt(JobContext.IO_SORT_FACTOR, 1000);
+    job.getConfiguration().setInt(MRJobConfig.IO_SORT_FACTOR, 1000);
     job.getConfiguration().set("fs.default.name", "file:///");
     job.getConfiguration().setInt("test.mapcollection.num.maps", 1);
     job.setInputFormatClass(FakeIF.class);
@@ -411,9 +411,9 @@ public class TestMapCollection {
     conf.setInt(Job.COMPLETION_POLL_INTERVAL_KEY, 100);
     Job job = Job.getInstance(new Cluster(conf), conf);
     conf = job.getConfiguration();
-    conf.setInt(JobContext.IO_SORT_MB, 1);
+    conf.setInt(MRJobConfig.IO_SORT_MB, 1);
     // 2^20 * spill = 14336 bytes available post-spill, at most 896 meta
-    conf.set(JobContext.MAP_SORT_SPILL_PERCENT, Float.toString(.986328125f));
+    conf.set(MRJobConfig.MAP_SORT_SPILL_PERCENT, Float.toString(.986328125f));
     conf.setClass("test.mapcollection.class", StepFactory.class,
         RecordFactory.class);
     StepFactory.setLengths(conf, 4000, 0, 96, 0, 252);
@@ -429,8 +429,8 @@ public class TestMapCollection {
     conf.setInt(Job.COMPLETION_POLL_INTERVAL_KEY, 100);
     Job job = Job.getInstance(new Cluster(conf), conf);
     conf = job.getConfiguration();
-    conf.setInt(JobContext.IO_SORT_MB, 1);
-    conf.set(JobContext.MAP_SORT_SPILL_PERCENT, Float.toString(.986328125f));
+    conf.setInt(MRJobConfig.IO_SORT_MB, 1);
+    conf.set(MRJobConfig.MAP_SORT_SPILL_PERCENT, Float.toString(.986328125f));
     conf.setClass("test.mapcollection.class", StepFactory.class,
         RecordFactory.class);
     StepFactory.setLengths(conf, 4000, 261120, 96, 1024, 251);
@@ -498,14 +498,14 @@ public class TestMapCollection {
     conf.setInt(Job.COMPLETION_POLL_INTERVAL_KEY, 100);
     Job job = Job.getInstance(new Cluster(conf), conf);
     conf = job.getConfiguration();
-    conf.setInt(JobContext.IO_SORT_MB, 1);
+    conf.setInt(MRJobConfig.IO_SORT_MB, 1);
     conf.setClass("test.mapcollection.class", RandomFactory.class,
         RecordFactory.class);
     final Random r = new Random();
     final long seed = r.nextLong();
     LOG.info("SEED: " + seed);
     r.setSeed(seed);
-    conf.set(JobContext.MAP_SORT_SPILL_PERCENT,
+    conf.set(MRJobConfig.MAP_SORT_SPILL_PERCENT,
         Float.toString(Math.max(0.1f, r.nextFloat())));
     RandomFactory.setLengths(conf, r, 1 << 14);
     conf.setInt("test.spillmap.records", r.nextInt(500));
@@ -519,15 +519,15 @@ public class TestMapCollection {
     conf.setInt(Job.COMPLETION_POLL_INTERVAL_KEY, 100);
     Job job = Job.getInstance(new Cluster(conf), conf);
     conf = job.getConfiguration();
-    conf.setInt(JobContext.IO_SORT_MB, 1);
-    conf.setBoolean(JobContext.MAP_OUTPUT_COMPRESS, true);
+    conf.setInt(MRJobConfig.IO_SORT_MB, 1);
+    conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);
     conf.setClass("test.mapcollection.class", RandomFactory.class,
         RecordFactory.class);
     final Random r = new Random();
     final long seed = r.nextLong();
     LOG.info("SEED: " + seed);
     r.setSeed(seed);
-    conf.set(JobContext.MAP_SORT_SPILL_PERCENT,
+    conf.set(MRJobConfig.MAP_SORT_SPILL_PERCENT,
         Float.toString(Math.max(0.1f, r.nextFloat())));
     RandomFactory.setLengths(conf, r, 1 << 14);
     conf.setInt("test.spillmap.records", r.nextInt(500));

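A note on the spill arithmetic in these hunks: with io.sort.mb = 1 the collection buffer is 2^20 bytes, and a spill percent of 0.986328125 leaves exactly 2^20 * (1 - 0.986328125) = 14336 bytes free once the spill threshold is crossed; at 16 bytes of accounting per record that is at most 896 metadata entries, matching the "at most 896 meta" comment above. A minimal sketch of the arithmetic (the 16-byte per-record metadata cost is an assumption taken from MapTask's MapOutputBuffer, not stated in this diff):

public class SpillMath {
  public static void main(String[] args) {
    final int bufferBytes = 1 << 20;           // MRJobConfig.IO_SORT_MB = 1
    final float spillPer = 0.986328125f;       // MRJobConfig.MAP_SORT_SPILL_PERCENT
    final int threshold = (int) (bufferBytes * spillPer);  // 1034240 bytes
    final int postSpill = bufferBytes - threshold;         // 14336 bytes free
    final int metaBytes = 16;                  // assumed accounting cost per record
    System.out.println(postSpill + " bytes free, at most "
        + (postSpill / metaBytes) + " meta entries");      // 14336, 896
  }
}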
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java Fri Apr 30 22:26:19 2010
@@ -531,7 +531,7 @@ public class TestValueIterReset extends 
       job.setOutputKeyClass(IntWritable.class);
       job.setOutputValueClass(IntWritable.class);
       job.getConfiguration().
-        setInt(JobContext.REDUCE_MARKRESET_BUFFER_SIZE,128);  
+        setInt(MRJobConfig.REDUCE_MARKRESET_BUFFER_SIZE,128);  
       job.setInputFormatClass(TextInputFormat.class);
       job.setOutputFormatClass(TextOutputFormat.class);
       FileInputFormat.addInputPath(job,

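For orientation, REDUCE_MARKRESET_BUFFER_SIZE bounds the in-memory buffer the framework uses to back up reduce-side values so the iterator can be rewound; the tiny value of 128 set above presumably forces the backup out of memory. A minimal sketch of the mark/reset API this test exercises (this reducer and its key/value types are hypothetical, not taken from the test):

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.MarkableIterator;
import org.apache.hadoop.mapreduce.Reducer;

public class TwoPassReducer
    extends Reducer<IntWritable, IntWritable, IntWritable, IntWritable> {
  @Override
  public void reduce(IntWritable key, Iterable<IntWritable> values, Context ctx)
      throws IOException, InterruptedException {
    MarkableIterator<IntWritable> mitr =
        new MarkableIterator<IntWritable>(values.iterator());
    mitr.mark();                 // remember the current position
    int count = 0;
    while (mitr.hasNext()) {     // first pass: count the values
      mitr.next();
      count++;
    }
    mitr.reset();                // rewind to the mark
    while (mitr.hasNext()) {     // second pass: emit each value with the count
      ctx.write(mitr.next(), new IntWritable(count));
    }
  }
}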
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java Fri Apr 30 22:26:19 2010
@@ -37,8 +37,8 @@ import org.apache.hadoop.mapred.TaskCont
 import org.apache.hadoop.mapred.TaskTracker;
 import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -146,7 +146,7 @@ public class TestTrackerDistributedCache
     // Configures a task/job with both a regular file and a "classpath" file.
     Configuration subConf = new Configuration(conf);
     String userName = getJobOwnerName();
-    subConf.set(JobContext.USER_NAME, userName);
+    subConf.set(MRJobConfig.USER_NAME, userName);
     DistributedCache.addCacheFile(firstCacheFile.toUri(), subConf);
     DistributedCache.addFileToClassPath(secondCacheFile, subConf);
     TrackerDistributedCacheManager.determineTimestamps(subConf);
@@ -450,7 +450,7 @@ public class TestTrackerDistributedCache
     FileSystem localfs = FileSystem.getLocal(conf2);
     long now = System.currentTimeMillis();
     String userName = getJobOwnerName();
-    conf2.set(JobContext.USER_NAME, userName);
+    conf2.set(MRJobConfig.USER_NAME, userName);
 
     // We first test the size limit
     Path localCache = manager.getLocalCache(firstCacheFile.toUri(), conf2, 
@@ -524,7 +524,7 @@ public class TestTrackerDistributedCache
       new TrackerDistributedCacheManager(conf, taskController);
     conf.set("fs.fakefile.impl", conf.get("fs.file.impl"));
     String userName = getJobOwnerName();
-    conf.set(JobContext.USER_NAME, userName);
+    conf.set(MRJobConfig.USER_NAME, userName);
     Path fileToCache = new Path("fakefile:///"
         + firstCacheFile.toUri().getPath());
     Path result = manager.getLocalCache(fileToCache.toUri(), conf,
@@ -610,7 +610,7 @@ public class TestTrackerDistributedCache
     // ****** Imitate JobClient code
     // Configures a task/job with both a regular file and a "classpath" file.
     Configuration subConf = new Configuration(myConf);
-    subConf.set(JobContext.USER_NAME, userName);
+    subConf.set(MRJobConfig.USER_NAME, userName);
     DistributedCache.addCacheFile(firstCacheFile.toUri(), subConf);
     TrackerDistributedCacheManager.determineTimestamps(subConf);
     TrackerDistributedCacheManager.determineCacheVisibilities(subConf);
@@ -656,7 +656,7 @@ public class TestTrackerDistributedCache
     
     // submit another job
     Configuration subConf2 = new Configuration(myConf);
-    subConf2.set(JobContext.USER_NAME, userName);
+    subConf2.set(MRJobConfig.USER_NAME, userName);
     DistributedCache.addCacheFile(firstCacheFile.toUri(), subConf2);
     TrackerDistributedCacheManager.determineTimestamps(subConf2);
     TrackerDistributedCacheManager.determineCacheVisibilities(subConf2);
@@ -692,7 +692,7 @@ public class TestTrackerDistributedCache
       return;
     }
     String userName = getJobOwnerName();
-    conf.set(JobContext.USER_NAME, userName);
+    conf.set(MRJobConfig.USER_NAME, userName);
     TrackerDistributedCacheManager manager = 
         new TrackerDistributedCacheManager(conf, taskController);
     FileSystem localfs = FileSystem.getLocal(conf);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java Fri Apr 30 22:26:19 2010
@@ -53,7 +53,7 @@ public class TestWrappedRRClassloader ex
       new CompositeInputFormat<NullWritable>();
     // create dummy TaskAttemptID
     TaskAttemptID tid = new TaskAttemptID("jt", 1, TaskType.MAP, 0, 0);
-    conf.set(JobContext.TASK_ATTEMPT_ID, tid.toString());
+    conf.set(MRJobConfig.TASK_ATTEMPT_ID, tid.toString());
     inputFormat.createRecordReader(inputFormat.getSplits(new Job(conf)).get(0), 
       new TaskAttemptContextImpl(conf, tid));
   }

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java Fri Apr 30 22:26:19 2010
@@ -30,6 +30,7 @@ import org.apache.hadoop.mapred.UtilsFor
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.JobStatus;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
@@ -73,7 +74,7 @@ public class TestFileOutputCommitter ext
     Job job = new Job();
     FileOutputFormat.setOutputPath(job, outDir);
     Configuration conf = job.getConfiguration();
-    conf.set(JobContext.TASK_ATTEMPT_ID, attempt);
+    conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
     JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
     TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
     FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
@@ -110,7 +111,7 @@ public class TestFileOutputCommitter ext
     Job job = new Job();
     FileOutputFormat.setOutputPath(job, outDir);
     Configuration conf = job.getConfiguration();
-    conf.set(JobContext.TASK_ATTEMPT_ID, attempt);
+    conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
     JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
     TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
     FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
@@ -160,7 +161,7 @@ public class TestFileOutputCommitter ext
     Configuration conf = job.getConfiguration();
     conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "faildel:///");
     conf.setClass("fs.faildel.impl", FakeFileSystem.class, FileSystem.class);
-    conf.set(JobContext.TASK_ATTEMPT_ID, attempt);
+    conf.set(MRJobConfig.TASK_ATTEMPT_ID, attempt);
     FileOutputFormat.setOutputPath(job, outDir);
     JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
     TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);

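These hunks all build the same scaffolding: a Job, an output path, a task attempt id stored under MRJobConfig.TASK_ATTEMPT_ID, and a FileOutputCommitter wired to job and task attempt contexts. A rough self-contained sketch of the commit sequence such a committer drives (the output path is hypothetical, and the method names follow the new org.apache.hadoop.mapreduce API as best understood; treat this as a sketch, not the test's body):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

public class CommitSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    TaskAttemptID tid =
        TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0");
    conf.set(MRJobConfig.TASK_ATTEMPT_ID, tid.toString());
    Path outDir = new Path("/tmp/committer-sketch");  // hypothetical
    JobContext jContext = new JobContextImpl(conf, tid.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, tid);
    FileOutputCommitter committer = new FileOutputCommitter(outDir, tContext);
    committer.setupJob(jContext);          // create the _temporary work area
    // ... a RecordWriter would write under the task's work path here ...
    if (committer.needsTaskCommit(tContext)) {
      committer.commitTask(tContext);      // promote task files out of _temporary
    }
    committer.commitJob(jContext);         // finalize the job's output directory
  }
}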
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java Fri Apr 30 22:26:19 2010
@@ -28,7 +28,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.lib.map.InverseMapper;
@@ -43,7 +43,7 @@ public class TestMRKeyFieldBasedComparat
   public TestMRKeyFieldBasedComparator() throws IOException {
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
     conf = createJobConf();
-    conf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
+    conf.set(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
   }
   
   private void testComparator(String keySpec, int expect) 
@@ -54,7 +54,7 @@ public class TestMRKeyFieldBasedComparat
     
     conf.set("mapreduce.partition.keycomparator.options", keySpec);
     conf.set("mapreduce.partition.keypartitioner.options", "-k1.1,1.1");
-    conf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
+    conf.set(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
 
     Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 2,
                 line1 +"\n" + line2 + "\n"); 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java Fri Apr 30 22:26:19 2010
@@ -34,7 +34,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 
 public class TestTotalOrderPartitioner extends TestCase {
 
@@ -83,7 +83,7 @@ public class TestTotalOrderPartitioner e
                                  ).makeQualified(fs);
     Path p = new Path(testdir, testname + "/_partition.lst");
     TotalOrderPartitioner.setPartitionFile(conf, p);
-    conf.setInt(JobContext.NUM_REDUCES, splits.length + 1);
+    conf.setInt(MRJobConfig.NUM_REDUCES, splits.length + 1);
     SequenceFile.Writer w = null;
     try {
       w = SequenceFile.createWriter(fs, conf, p,
@@ -105,7 +105,7 @@ public class TestTotalOrderPartitioner e
     Configuration conf = new Configuration();
     Path p = TestTotalOrderPartitioner.<Text>writePartitionFile(
         "totalordermemcmp", conf, splitStrings);
-    conf.setClass(JobContext.MAP_OUTPUT_KEY_CLASS, Text.class, Object.class);
+    conf.setClass(MRJobConfig.MAP_OUTPUT_KEY_CLASS, Text.class, Object.class);
     try {
       partitioner.setConf(conf);
       NullWritable nw = NullWritable.get();
@@ -125,7 +125,7 @@ public class TestTotalOrderPartitioner e
     Path p = TestTotalOrderPartitioner.<Text>writePartitionFile(
         "totalorderbinarysearch", conf, splitStrings);
     conf.setBoolean(TotalOrderPartitioner.NATURAL_ORDER, false);
-    conf.setClass(JobContext.MAP_OUTPUT_KEY_CLASS, Text.class, Object.class);
+    conf.setClass(MRJobConfig.MAP_OUTPUT_KEY_CLASS, Text.class, Object.class);
     try {
       partitioner.setConf(conf);
       NullWritable nw = NullWritable.get();
@@ -159,8 +159,8 @@ public class TestTotalOrderPartitioner e
     Path p = TestTotalOrderPartitioner.<Text>writePartitionFile(
         "totalordercustomcomparator", conf, revSplitStrings);
     conf.setBoolean(TotalOrderPartitioner.NATURAL_ORDER, false);
-    conf.setClass(JobContext.MAP_OUTPUT_KEY_CLASS, Text.class, Object.class);
-    conf.setClass(JobContext.KEY_COMPARATOR,
+    conf.setClass(MRJobConfig.MAP_OUTPUT_KEY_CLASS, Text.class, Object.class);
+    conf.setClass(MRJobConfig.KEY_COMPARATOR,
       ReverseStringComparator.class, RawComparator.class);
     ArrayList<Check<Text>> revCheck = new ArrayList<Check<Text>>();
     revCheck.add(new Check<Text>(new Text("aaaaa"), 9));

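The TotalOrderPartitioner hunks share one setup: write a sorted partition file, point the configuration at it, then declare the reduce count and map output key class so the partitioner can build its search structure. A condensed sketch of that wiring outside the test harness (the partition-file path is hypothetical and must already hold numPartitions - 1 sorted keys, as writePartitionFile produces above):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;

public class TotalOrderSetup {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Point the partitioner at a pre-written _partition.lst (hypothetical path).
    TotalOrderPartitioner.setPartitionFile(conf, new Path("/tmp/_partition.lst"));
    conf.setInt(MRJobConfig.NUM_REDUCES, 4);  // expects 3 split keys in the file
    conf.setClass(MRJobConfig.MAP_OUTPUT_KEY_CLASS, Text.class, Object.class);

    TotalOrderPartitioner<Text, NullWritable> partitioner =
        new TotalOrderPartitioner<Text, NullWritable>();
    partitioner.setConf(conf);  // reads the partition file, builds the search trie
    int p = partitioner.getPartition(new Text("key"), NullWritable.get(), 4);
    System.out.println("partition = " + p);
  }
}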
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/security/TestTokenCache.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/security/TestTokenCache.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/security/TestTokenCache.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/security/TestTokenCache.java Fri Apr 30 22:26:19 2010
@@ -44,7 +44,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.SleepJob;
 import org.apache.hadoop.security.TokenStorage;
 import org.apache.hadoop.security.token.Token;
@@ -207,9 +207,9 @@ public class TestTokenCache {
     
     // provide namenodes names for the job to get the delegation tokens for
     String nnUri = dfsCluster.getURI().toString();
-    jConf.set(JobContext.JOB_NAMENODES, nnUri + "," + nnUri);
+    jConf.set(MRJobConfig.JOB_NAMENODES, nnUri + "," + nnUri);
     // job tracker principal id
-    jConf.set(JobContext.JOB_JOBTRACKER_ID, "jt_id");
+    jConf.set(MRJobConfig.JOB_JOBTRACKER_ID, "jt_id");
     
     // using argument to pass the file name
     String[] args = {

Modified: hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java (original)
+++ hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java Fri Apr 30 22:26:19 2010
@@ -64,8 +64,8 @@ import org.apache.hadoop.mapred.Reporter
 import org.apache.hadoop.mapred.SequenceFileRecordReader;
 import org.apache.hadoop.mapred.lib.NullOutputFormat;
 import org.apache.hadoop.mapreduce.Cluster;
-import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.JobSubmissionFiles;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -546,7 +546,7 @@ public class HadoopArchives implements T
     conf.setReducerClass(HArchivesReducer.class);
     conf.setMapOutputKeyClass(IntWritable.class);
     conf.setMapOutputValueClass(Text.class);
-    conf.set(JobContext.HISTORY_LOCATION, "none");
+    conf.set(MRJobConfig.HISTORY_LOCATION, "none");
     FileInputFormat.addInputPath(conf, jobDirectory);
     //make sure no speculative execution is done
     conf.setSpeculativeExecution(false);
@@ -583,7 +583,7 @@ public class HadoopArchives implements T
       // this is tightly tied to map reduce
       // since it does not expose an api 
       // to get the partition
-      partId = conf.getInt(JobContext.TASK_PARTITION, -1);
+      partId = conf.getInt(MRJobConfig.TASK_PARTITION, -1);
       // create a file name using the partition
       // we need to write to this directory
       tmpOutputDir = FileOutputFormat.getWorkOutputPath(conf);

Modified: hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java (original)
+++ hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java Fri Apr 30 22:26:19 2010
@@ -17,13 +17,13 @@
  */
 package org.apache.hadoop.tools.rumen;
 
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 
 public enum JobConfPropertyNames {
-  QUEUE_NAMES("mapred.job.queue.name", JobContext.QUEUE_NAME), JOB_NAMES(
-      JobContext.JOB_NAME), TASK_JAVA_OPTS_S("mapred.child.java.opts"),
-  MAP_JAVA_OPTS_S("mapred.child.java.opts", JobContext.MAP_JAVA_OPTS),
-  REDUCE_JAVA_OPTS_S("mapred.child.java.opts", JobContext.REDUCE_JAVA_OPTS);
+  QUEUE_NAMES("mapred.job.queue.name", MRJobConfig.QUEUE_NAME), JOB_NAMES(
+      MRJobConfig.JOB_NAME), TASK_JAVA_OPTS_S("mapred.child.java.opts"),
+  MAP_JAVA_OPTS_S("mapred.child.java.opts", MRJobConfig.MAP_JAVA_OPTS),
+  REDUCE_JAVA_OPTS_S("mapred.child.java.opts", MRJobConfig.REDUCE_JAVA_OPTS);
 
   private String[] candidates;
 
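Each enum constant above carries an ordered list of candidate configuration keys, with the legacy name listed ahead of its MRJobConfig replacement, so Rumen can read whichever key an old or new job conf happens to define. The intended lookup is first-match-wins; a minimal illustrative sketch of that pattern (firstPresent is a hypothetical helper, not part of this class):

import org.apache.hadoop.conf.Configuration;

public class CandidateLookup {
  // Return the value of the first candidate key set in the configuration,
  // or null if none of them is present.
  static String firstPresent(Configuration conf, String[] candidates) {
    for (String key : candidates) {
      String value = conf.get(key);
      if (value != null) {
        return value;
      }
    }
    return null;
  }
}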

Modified: hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/ParsedConfigFile.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/ParsedConfigFile.java?rev=939849&r1=939848&r2=939849&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/ParsedConfigFile.java (original)
+++ hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/ParsedConfigFile.java Fri Apr 30 22:26:19 2010
@@ -29,7 +29,7 @@ import javax.xml.parsers.DocumentBuilder
 import javax.xml.parsers.ParserConfigurationException;
 
 import org.apache.hadoop.mapreduce.MRConfig;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.w3c.dom.Document;
 import org.w3c.dom.NodeList;
 import org.w3c.dom.Node;
@@ -159,11 +159,11 @@ class ParsedConfigFile {
           }
         }
 
-        if (JobContext.QUEUE_NAME.equals(attr) && value != null) {
+        if (MRJobConfig.QUEUE_NAME.equals(attr) && value != null) {
           queue = value;
         }
 
-        if (JobContext.JOB_NAME.equals(attr) && value != null) {
+        if (MRJobConfig.JOB_NAME.equals(attr) && value != null) {
           jobName = value;
         }
 
@@ -173,9 +173,9 @@ class ParsedConfigFile {
             maybeGetIntValue(MRConfig.REDUCEMEMORY_MB, attr, value,
                 clusterReduceMB);
         jobMapMB =
-            maybeGetIntValue(JobContext.MAP_MEMORY_MB, attr, value, jobMapMB);
+            maybeGetIntValue(MRJobConfig.MAP_MEMORY_MB, attr, value, jobMapMB);
         jobReduceMB =
-            maybeGetIntValue(JobContext.REDUCE_MEMORY_MB, attr, value,
+            maybeGetIntValue(MRJobConfig.REDUCE_MEMORY_MB, attr, value,
                 jobReduceMB);
       }
 


