From: sharad@apache.org
To: mapreduce-commits@hadoop.apache.org
Subject: svn commit: r949931 - in /hadoop/mapreduce/trunk: ./ src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/ src/java/org/apache/hadoop/mapred/ src/java/org/apache/hadoop/mapreduce/ src/java/org/apache/hadoop/mapreduce/task/ src/java/org/apache/...
Date: Tue, 01 Jun 2010 04:55:46 -0000
Message-Id: <20100601045546.BDCB6238897F@eris.apache.org>

Author: sharad
Date: Tue Jun 1 04:55:45 2010
New Revision: 949931

URL: http://svn.apache.org/viewvc?rev=949931&view=rev
Log:
MAPREDUCE-118. Fix Job.getJobID(). Contributed by Amareshwari Sriramadasu.
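
In short, this change removes Job.getID() (which returned status.getJobID() and required the job to already be RUNNING), keeps the JobID on JobContextImpl via a new setJobID() setter, and has JobSubmitter populate it as soon as a new job ID is allocated, so getJobID() returns a real ID right after submit(). A minimal client-side sketch of the resulting behaviour follows; the Cluster/Job setup is illustrative only, and mapper/input/output configuration is omitted:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Cluster;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.JobID;

    public class JobIdAfterSubmit {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Cluster cluster = new Cluster(conf);

        // Illustrative job; real code would also set the mapper class and
        // input/output paths before submitting.
        Job job = Job.getInstance(cluster, conf);
        job.setJobName("jobid-demo");

        // JobSubmitter now calls job.setJobID(...) as soon as it obtains a
        // new JobID from the cluster, so ...
        job.submit();

        // ... the ID is readable immediately after submit(), without the
        // ensureState(JobState.RUNNING) check the removed getID() performed.
        JobID id = job.getJobID();
        System.out.println("Submitted job " + id);

        job.waitForCompletion(false);  // block until the job finishes
      }
    }

Before this change, getJobID() inherited from JobContextImpl was never populated, and callers had to go through the separate getID(), which only worked once the job was in RUNNING state.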

Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/Gridmix.java
    hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobMonitor.java
    hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobSubmitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Job.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobSubmitter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/tools/CLI.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobDirCleanup.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgressListener.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestQueueManagerWithJobTracker.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestWebUIAuthorization.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestJobACLs.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestNoJobSetupCleanup.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestJobOutputCommitter.java

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Tue Jun 1 04:55:45 2010
@@ -1623,3 +1623,5 @@ Release 0.21.0 - Unreleased
     MAPREDUCE-1372. ConcurrentModificationException in JobInProgress.
     (Dick King and Amareshwari Sriramadasu via tomwhite)
 
+    MAPREDUCE-118. Fix Job.getJobID().
+    (Amareshwari Sriramadasu via sharad)

Modified: hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/Gridmix.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/Gridmix.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/Gridmix.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/Gridmix.java Tue Jun 1 04:55:45 2010
@@ -296,7 +296,7 @@ public class Gridmix extends Configured
         try {
           if (!job.isComplete()) {
             job.killJob();
-            LOG.info("Killed " + job.getJobName() + " (" + job.getID() + ")");
+            LOG.info("Killed " + job.getJobName() + " (" + job.getJobID() + ")");
           } else {
             if (job.isSuccessful()) {
               monitor.onSuccess(job);

Modified: hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobMonitor.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobMonitor.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobMonitor.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobMonitor.java Tue Jun 1 04:55:45 2010
@@ -92,14 +92,14 @@ class JobMonitor implements Gridmix.Comp
    * Temporary hook for recording job success.
    */
   protected void onSuccess(Job job) {
-    LOG.info(job.getJobName() + " (" + job.getID() + ")" + " success");
+    LOG.info(job.getJobName() + " (" + job.getJobID() + ")" + " success");
   }
 
   /**
    * Temporary hook for recording job failure.
   */
  protected void onFailure(Job job) {
-    LOG.info(job.getJobName() + " (" + job.getID() + ")" + " failure");
+    LOG.info(job.getJobName() + " (" + job.getJobID() + ")" + " failure");
  }
 
  /**

Modified: hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobSubmitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobSubmitter.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobSubmitter.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobSubmitter.java Tue Jun 1 04:55:45 2010
@@ -99,7 +99,7 @@ class JobSubmitter implements Gridmix.Co
         // submit job
         monitor.add(job.call());
         LOG.debug("SUBMIT " + job + "@" + System.currentTimeMillis() +
-            " (" + job.getJob().getID() + ")");
+            " (" + job.getJob().getJobID() + ")");
       } catch (IOException e) {
         LOG.warn("Failed to submit " + job.getJob().getJobName(), e);
         if (e.getCause() instanceof ClosedByInterruptException) {

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobClient.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobClient.java Tue Jun 1 04:55:45 2010
@@ -39,7 +39,6 @@ import org.apache.hadoop.mapreduce.filec
 import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.mapreduce.tools.CLI;
 import org.apache.hadoop.mapreduce.util.ConfigUtil;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
@@ -175,7 +174,7 @@ public class JobClient extends CLI {
      * An identifier for the job
      */
     public JobID getID() {
-      return JobID.downgrade(job.getID());
+      return JobID.downgrade(job.getJobID());
     }
 
     /** @deprecated This method is deprecated and will be removed. Applications should

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java Tue Jun 1 04:55:45 2010
@@ -594,8 +594,8 @@ public class LocalJobRunner implements C
   }
 
   public void killJob(org.apache.hadoop.mapreduce.JobID id) {
-    jobs.get(id).killed = true;
-    jobs.get(id).interrupt();
+    jobs.get(JobID.downgrade(id)).killed = true;
+    jobs.get(JobID.downgrade(id)).interrupt();
   }
 
   public void setJobPriority(org.apache.hadoop.mapreduce.JobID id,

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Job.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Job.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Job.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Job.java Tue Jun 1 04:55:45 2010
@@ -148,8 +148,9 @@ public class Job extends JobContextImpl
   Job(Cluster cluster, JobStatus status,
       Configuration conf) throws IOException {
     this(cluster, conf);
-    state = JobState.RUNNING;
+    setJobID(status.getJobID());
     this.status = status;
+    state = JobState.RUNNING;
   }
 
   public static Job getInstance(Cluster cluster) throws IOException {
@@ -201,15 +202,6 @@ public class Job extends JobContextImpl
     updateStatus();
     return status;
   }
-
-  /**
-   * Get the job identifier.
-   *
-   * @return the job identifier.
-   */
-  public JobID getID() {
-    ensureState(JobState.RUNNING);
-    return status.getJobID();
-  }
 
   /**
   * Returns the current state of the Job.
@@ -348,7 +340,7 @@ public class Job extends JobContextImpl
   public TaskReport[] getTaskReports(TaskType type)
       throws IOException, InterruptedException {
     ensureState(JobState.RUNNING);
-    return cluster.getClient().getTaskReports(getID(), type);
+    return cluster.getClient().getTaskReports(getJobID(), type);
   }
 
   /**
@@ -436,7 +428,7 @@ public class Job extends JobContextImpl
    */
   public void killJob() throws IOException, InterruptedException {
     ensureState(JobState.RUNNING);
-    cluster.getClient().killJob(getID());
+    cluster.getClient().killJob(getJobID());
   }
 
   /**
@@ -451,7 +443,7 @@ public class Job extends JobContextImpl
         org.apache.hadoop.mapred.JobPriority.valueOf(priority.name()));
     } else {
       ensureState(JobState.RUNNING);
-      cluster.getClient().setJobPriority(getID(), priority.toString());
+      cluster.getClient().setJobPriority(getJobID(), priority.toString());
     }
   }
 
@@ -466,7 +458,7 @@ public class Job extends JobContextImpl
   public TaskCompletionEvent[] getTaskCompletionEvents(int startFrom,
       int numEvents) throws IOException, InterruptedException {
     ensureState(JobState.RUNNING);
-    return cluster.getClient().getTaskCompletionEvents(getID(),
+    return cluster.getClient().getTaskCompletionEvents(getJobID(),
       startFrom, numEvents);
   }
 
@@ -503,7 +495,7 @@ public class Job extends JobContextImpl
   public Counters getCounters()
       throws IOException, InterruptedException {
     ensureState(JobState.RUNNING);
-    return cluster.getClient().getJobCounters(getID());
+    return cluster.getClient().getJobCounters(getJobID());
   }
 
   /**
@@ -1017,7 +1009,7 @@ public class Job extends JobContextImpl
     Job.TaskStatusFilter filter;
     Configuration clientConf = cluster.getConf();
     filter = Job.getTaskOutputFilter(clientConf);
-    JobID jobId = getID();
+    JobID jobId = getJobID();
     LOG.info("Running job: " + jobId);
     int eventCounter = 0;
     boolean profiling = getProfileEnabled();

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobSubmitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobSubmitter.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobSubmitter.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/JobSubmitter.java Tue Jun 1 04:55:45 2010
@@ -318,6 +318,7 @@ class JobSubmitter {
     //configure the command line options correctly on the submitting dfs
     Configuration conf = job.getConfiguration();
     JobID jobId = submitClient.getNewJobID();
+    job.setJobID(jobId);
     Path submitJobDir = new Path(jobStagingArea, jobId.toString());
     JobStatus status = null;
     try {

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java Tue Jun 1 04:55:45 2010
@@ -50,7 +50,7 @@ import org.apache.hadoop.security.UserGr
 public class JobContextImpl implements JobContext {
 
   protected final org.apache.hadoop.mapred.JobConf conf;
-  private final JobID jobId;
+  private JobID jobId;
   /**
    * The UserGroupInformation object that has a reference to the current user
    */
@@ -83,6 +83,13 @@ public class JobContextImpl implements J
   }
 
   /**
+   * Set the JobID.
+   */
+  public void setJobID(JobID jobId) {
+    this.jobId = jobId;
+  }
+
+  /**
    * Get configured the number of reduce tasks for this job. Defaults to
    * 1.
   * @return the number of reduce tasks for this job.

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/tools/CLI.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/tools/CLI.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/tools/CLI.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/tools/CLI.java Tue Jun 1 04:55:45 2010
@@ -217,7 +217,7 @@ public class CLI extends Configured impl
       if (submitJobFile != null) {
         Job job = Job.getInstance(cluster, new JobConf(submitJobFile));
         job.submit();
-        System.out.println("Created job " + job.getID());
+        System.out.println("Created job " + job.getJobID());
         exitCode = 0;
       } else if (getStatus) {
         Job job = cluster.getJob(JobID.forName(jobid));
@@ -423,7 +423,7 @@ public class CLI extends Configured impl
     throws IOException, InterruptedException {
     TaskCompletionEvent[] events = job.
       getTaskCompletionEvents(fromEventId, numEvents);
-    System.out.println("Task completion events for " + job.getID());
+    System.out.println("Task completion events for " + job.getJobID());
     System.out.println("Number of events (from " + fromEventId + ") are: "
       + events.length);
     for(TaskCompletionEvent event: events) {
@@ -529,7 +529,7 @@ public class CLI extends Configured impl
       System.out.println("JobId\tState\tStartTime\t" +
         "UserName\tPriority\tSchedulingInfo");
       for (Job job : jobs) {
-        System.out.printf("%s\t%s\t%d\t%s\t%s\t%s\n", job.getID().toString(),
+        System.out.printf("%s\t%s\t%d\t%s\t%s\t%s\n", job.getJobID().toString(),
           job.getJobState(), job.getStartTime(),
           job.getUser(), job.getPriority().name(), job.getSchedulingInfo());
       }

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobDirCleanup.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobDirCleanup.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobDirCleanup.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobDirCleanup.java Tue Jun 1 04:55:45 2010
@@ -42,7 +42,7 @@ public class TestJobDirCleanup extends T
     sleep.setConf(conf);
     Job job = sleep.createJob(1, 10, 1000, 1, 10000, 1);
     job.waitForCompletion(true);
-    return job.getID();
+    return job.getJobID();
   }
 
   public void testJobDirCleanup() throws Exception {

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgressListener.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgressListener.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgressListener.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgressListener.java Tue Jun 1 04:55:45 2010
@@ -387,7 +387,7 @@ public class TestJobInProgressListener e
     j.submit();
     j.waitForCompletion(true);
 
-    JobID id = (org.apache.hadoop.mapred.JobID)j.getID();
+    JobID id = JobID.downgrade(j.getJobID());
 
     // check if the job is in the waiting queue
     assertFalse("Missing event notification on no-set-cleanup no task job",

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestQueueManagerWithJobTracker.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestQueueManagerWithJobTracker.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestQueueManagerWithJobTracker.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestQueueManagerWithJobTracker.java Tue Jun 1 04:55:45 2010
@@ -176,7 +176,7 @@ public class TestQueueManagerWithJobTrac
     //signal to the job.
     jip = tracker.getJob(org.apache.hadoop.mapred.JobID.downgrade(jobID));
     tracker.initJob(jip);
-    tracker.killJob(job.getID());
+    tracker.killJob(job.getJobID());
     // kill the job by the user who submitted the job
     assertEquals("job submitted for u1 and queue p1:p11 is not killed.",
         cluster.getJob(jobID).getStatus().getState(), (State.KILLED));

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java Tue Jun 1 04:55:45 2010
@@ -404,9 +404,9 @@ public class TestTaskTrackerMemoryManage
     while (true) {
       List allTaskReports = new ArrayList();
       allTaskReports.addAll(Arrays.asList(jClient
-          .getSetupTaskReports((org.apache.hadoop.mapred.JobID) job.getID())));
+          .getSetupTaskReports(JobID.downgrade(job.getJobID()))));
       allTaskReports.addAll(Arrays.asList(jClient
-          .getMapTaskReports((org.apache.hadoop.mapred.JobID) job.getID())));
+          .getMapTaskReports(JobID.downgrade(job.getJobID()))));
       for (TaskReport tr : allTaskReports) {
         String[] diag = tr.getDiagnostics();
         for (String str : diag) {
@@ -575,9 +575,9 @@ public class TestTaskTrackerMemoryManage
     while (true) {
       List allTaskReports = new ArrayList();
       allTaskReports.addAll(Arrays.asList(jClient
-          .getSetupTaskReports((org.apache.hadoop.mapred.JobID) job.getID())));
+          .getSetupTaskReports(JobID.downgrade(job.getJobID()))));
      allTaskReports.addAll(Arrays.asList(jClient
-          .getMapTaskReports((org.apache.hadoop.mapred.JobID) job.getID())));
+          .getMapTaskReports(JobID.downgrade(job.getJobID()))));
       for (TaskReport tr : allTaskReports) {
         String[] diag = tr.getDiagnostics();
         for (String str : diag) {

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestWebUIAuthorization.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestWebUIAuthorization.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestWebUIAuthorization.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestWebUIAuthorization.java Tue Jun 1 04:55:45 2010
@@ -185,7 +185,7 @@ public class TestWebUIAuthorization exte
   private TaskID getTIPId(MiniMRCluster cluster,
       org.apache.hadoop.mapreduce.JobID jobid) throws Exception {
     JobClient client = new JobClient(cluster.createJobConf());
-    JobID jobId = (JobID) jobid;
+    JobID jobId = JobID.downgrade(jobid);
     TaskReport[] mapReports = null;
     TaskID tipId = null;
@@ -214,7 +214,7 @@ public class TestWebUIAuthorization exte
       JobConf conf, String jtURL, String jobTrackerJSP, String user)
       throws Exception {
     Job job = startSleepJobAsUser(jobSubmitter, conf);
-    org.apache.hadoop.mapreduce.JobID jobid = job.getID();
+    org.apache.hadoop.mapreduce.JobID jobid = job.getJobID();
     getTIPId(cluster, jobid);// wait till the map task is started
     // jobDetailsJSP killJob url
     String url = jtURL + "/jobdetails.jsp?" +
@@ -283,7 +283,7 @@ public class TestWebUIAuthorization exte
       }
     });
 
-    org.apache.hadoop.mapreduce.JobID jobid = job.getID();
+    org.apache.hadoop.mapreduce.JobID jobid = job.getJobID();
     String historyFileName = job.getStatus().getHistoryFile();
     String jtURL = "http://localhost:" + infoPort;
 
@@ -407,7 +407,7 @@ public class TestWebUIAuthorization exte
     String jobTrackerJSP = jtURL + "/jobtracker.jsp?a=b";
     Job job = startSleepJobAsUser(jobSubmitter, conf);
-    org.apache.hadoop.mapreduce.JobID jobid = job.getID();
+    org.apache.hadoop.mapreduce.JobID jobid = job.getJobID();
     getTIPId(cluster, jobid);// wait till the map task is started
     // jobDetailsJSPKillJobAction url
     String url = jtURL + "/jobdetails.jsp?" +
@@ -453,7 +453,7 @@ public class TestWebUIAuthorization exte
       throws Exception {
     String jobTrackerJSP = jtURL + "/jobtracker.jsp?a=b";
     Job job = startSleepJobAsUser(jobSubmitter, conf);
-    org.apache.hadoop.mapreduce.JobID jobid = job.getID();
+    org.apache.hadoop.mapreduce.JobID jobid = job.getJobID();
     getTIPId(cluster, jobid);// wait till the map task is started
     // jobTrackerJSP killJob url
     String url = jobTrackerJSP +
@@ -497,19 +497,19 @@ public class TestWebUIAuthorization exte
     // Out of these 4 users, only jobSubmitter can do killJob on 1st job
     conf.set(MRJobConfig.JOB_ACL_MODIFY_JOB, "");
     Job job1 = startSleepJobAsUser(jobSubmitter, conf);
-    org.apache.hadoop.mapreduce.JobID jobid = job1.getID();
+    org.apache.hadoop.mapreduce.JobID jobid = job1.getJobID();
     getTIPId(cluster, jobid);// wait till the map task is started
     url = url.concat("&jobCheckBox=" + jobid.toString());
     // start 2nd job.
     // Out of these 4 users, only viewColleague can do killJob on 2nd job
     Job job2 = startSleepJobAsUser(viewColleague, conf);
-    jobid = job2.getID();
+    jobid = job2.getJobID();
     getTIPId(cluster, jobid);// wait till the map task is started
     url = url.concat("&jobCheckBox=" + jobid.toString());
     // start 3rd job.
     // Out of these 4 users, only modifyColleague can do killJob on 3rd job
     Job job3 = startSleepJobAsUser(modifyColleague, conf);
-    jobid = job3.getID();
+    jobid = job3.getJobID();
     getTIPId(cluster, jobid);// wait till the map task is started
     url = url.concat("&jobCheckBox=" + jobid.toString());
     // start 4rd job.
@@ -517,7 +517,7 @@ public class TestWebUIAuthorization exte
     // can do killJob on 4th job
     conf.set(MRJobConfig.JOB_ACL_MODIFY_JOB, viewColleague);
     Job job4 = startSleepJobAsUser(viewAndModifyColleague, conf);
-    jobid = job4.getID();
+    jobid = job4.getJobID();
     getTIPId(cluster, jobid);// wait till the map task is started
     url = url.concat("&jobCheckBox=" + jobid.toString());
 
@@ -590,7 +590,7 @@ public class TestWebUIAuthorization exte
 
     Job job = startSleepJobAsUser(jobSubmitter, conf);
-    org.apache.hadoop.mapreduce.JobID jobid = job.getID();
+    org.apache.hadoop.mapreduce.JobID jobid = job.getJobID();
     String jtURL = "http://localhost:" + infoPort;
 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestJobACLs.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestJobACLs.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestJobACLs.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestJobACLs.java Tue Jun 1 04:55:45 2010
@@ -127,7 +127,7 @@ public class TestJobACLs {
     // Submit the job as user1
     Job job = submitJobAsUser(myConf, "user1");
 
-    final JobID jobId = job.getID();
+    final JobID jobId = job.getJobID();
 
     // Try operations as an unauthorized user.
     verifyViewJobAsUnauthorizedUser(myConf, jobId, "user2");
@@ -261,7 +261,7 @@ public class TestJobACLs {
     // Submit the job as user1
     Job job = submitJobAsUser(myConf, "user1");
 
-    final JobID jobId = job.getID();
+    final JobID jobId = job.getJobID();
 
     // Try operations as an unauthorized user.
     verifyModifyJobAsUnauthorizedUser(myConf, jobId, "user2");
@@ -369,7 +369,7 @@ public class TestJobACLs {
     // Submit the job as user1
     Job job = submitJobAsUser(myConf, "user1");
 
-    final JobID jobId = job.getID();
+    final JobID jobId = job.getJobID();
 
     // Kill the job and wait till it is actually killed so that it is written to
     // CompletedJobStore

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestMRJobClient.java Tue Jun 1 04:55:45 2010
@@ -68,7 +68,7 @@ public class TestMRJobClient extends Clu
   public void testJobClient() throws Exception {
     Configuration conf = createJobConf();
     Job job = runJob(conf);
-    String jobId = job.getID().toString();
+    String jobId = job.getJobID().toString();
     testGetCounter(jobId, conf);
     testJobList(jobId, conf);
     testChangingJobPriority(jobId, conf);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestNoJobSetupCleanup.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestNoJobSetupCleanup.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestNoJobSetupCleanup.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestNoJobSetupCleanup.java Tue Jun 1 04:55:45 2010
@@ -27,8 +27,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.HadoopTestCase;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 
@@ -51,12 +49,10 @@ public class TestNoJobSetupCleanup exten
     job.setOutputFormatClass(MyOutputFormat.class);
     job.waitForCompletion(true);
     assertTrue(job.isSuccessful());
-    JobID jobid = (org.apache.hadoop.mapred.JobID)job.getID();
-    JobClient jc = new JobClient(conf);
-    assertTrue(jc.getSetupTaskReports(jobid).length == 0);
-    assertTrue(jc.getCleanupTaskReports(jobid).length == 0);
-    assertTrue(jc.getMapTaskReports(jobid).length == numMaps);
-    assertTrue(jc.getReduceTaskReports(jobid).length == numReds);
+    assertTrue(job.getTaskReports(TaskType.JOB_SETUP).length == 0);
+    assertTrue(job.getTaskReports(TaskType.JOB_CLEANUP).length == 0);
+    assertTrue(job.getTaskReports(TaskType.MAP).length == numMaps);
+    assertTrue(job.getTaskReports(TaskType.REDUCE).length == numReds);
     FileSystem fs = FileSystem.get(conf);
     assertTrue("Job output directory doesn't exit!", fs.exists(outDir));
     FileStatus[] list = fs.listStatus(outDir, new OutputFilter());

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestJobOutputCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestJobOutputCommitter.java?rev=949931&r1=949930&r2=949931&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestJobOutputCommitter.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestJobOutputCommitter.java Tue Jun 1 04:55:45 2010
@@ -149,13 +149,13 @@ public class TestJobOutputCommitter exte
     assertTrue("Job failed!", job.waitForCompletion(true));
 
     Path testFile = new Path(outDir, filename);
-    assertTrue("Done file missing for job " + job.getID(), fs.exists(testFile));
+    assertTrue("Done file missing for job " + job.getJobID(), fs.exists(testFile));
 
     // check if the files from the missing set exists
     for (String ex : exclude) {
       Path file = new Path(outDir, ex);
       assertFalse("File " + file + " should not be present for successful job "
-          + job.getID(), fs.exists(file));
+          + job.getJobID(), fs.exists(file));
     }
   }
 
@@ -170,7 +170,7 @@ public class TestJobOutputCommitter exte
 
     if (fileName != null) {
       Path testFile = new Path(outDir, fileName);
-      assertTrue("File " + testFile + " missing for failed job " + job.getID(),
+      assertTrue("File " + testFile + " missing for failed job " + job.getJobID(),
          fs.exists(testFile));
     }
 
@@ -178,7 +178,7 @@ public class TestJobOutputCommitter exte
     for (String ex : exclude) {
       Path file = new Path(outDir, ex);
       assertFalse("File " + file + " should not be present for failed job "
-          + job.getID(), fs.exists(file));
+          + job.getJobID(), fs.exists(file));
     }
   }
 
@@ -202,7 +202,7 @@ public class TestJobOutputCommitter exte
 
     if (fileName != null) {
       Path testFile = new Path(outDir, fileName);
-      assertTrue("File " + testFile + " missing for job " + job.getID(), fs
+      assertTrue("File " + testFile + " missing for job " + job.getJobID(), fs
          .exists(testFile));
     }
 
@@ -210,7 +210,7 @@ public class TestJobOutputCommitter exte
     for (String ex : exclude) {
       Path file = new Path(outDir, ex);
      assertFalse("File " + file + " should not be present for killed job "
-          + job.getID(), fs.exists(file));
+          + job.getJobID(), fs.exists(file));
    }
  }