Subject: svn commit: r1177787 - in /hadoop/common/branches/branch-0.22/mapreduce: CHANGES.txt src/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java
Date: Fri, 30 Sep 2011 19:21:11 -0000
To: mapreduce-commits@hadoop.apache.org
From: shv@apache.org
Message-Id: <20110930192111.F2E5723888EA@eris.apache.org>

Author: shv
Date: Fri Sep 30 19:21:11 2011
New Revision: 1177787

URL: http://svn.apache.org/viewvc?rev=1177787&view=rev
Log:
MAPREDUCE-2779. JobSplitWriter.java can't handle large job.split file. Contributed by Ming Ma.

Modified:
    hadoop/common/branches/branch-0.22/mapreduce/CHANGES.txt
    hadoop/common/branches/branch-0.22/mapreduce/src/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java

Modified: hadoop/common/branches/branch-0.22/mapreduce/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.22/mapreduce/CHANGES.txt?rev=1177787&r1=1177786&r2=1177787&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.22/mapreduce/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.22/mapreduce/CHANGES.txt Fri Sep 30 19:21:11 2011
@@ -613,6 +613,9 @@ Release 0.22.0 - Unreleased
     MAPREDUCE-3026. Fix NPE in mapred queue -list with hierarchical queues.
     (Mayank Bansal via shv)
 
+    MAPREDUCE-2779. JobSplitWriter.java can't handle large job.split file.
+    (Ming Ma via shv)
+
 Release 0.21.1 - Unreleased
 
   NEW FEATURES

Modified: hadoop/common/branches/branch-0.22/mapreduce/src/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.22/mapreduce/src/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java?rev=1177787&r1=1177786&r2=1177787&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.22/mapreduce/src/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java (original)
+++ hadoop/common/branches/branch-0.22/mapreduce/src/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java Fri Sep 30 19:21:11 2011
@@ -114,15 +114,15 @@ public class JobSplitWriter {
     if (array.length != 0) {
       SerializationFactory factory = new SerializationFactory(conf);
       int i = 0;
-      long offset = out.size();
+      long offset = out.getPos();
       for(T split: array) {
-        int prevCount = out.size();
+        long prevCount = out.getPos();
         Text.writeString(out, split.getClass().getName());
         Serializer<T> serializer = 
          factory.getSerializer((Class<T>) split.getClass());
         serializer.open(out);
         serializer.serialize(split);
-        int currCount = out.size();
+        long currCount = out.getPos();
         info[i++] = 
           new JobSplit.SplitMetaInfo( 
               split.getLocations(), offset,
@@ -139,12 +139,12 @@ public class JobSplitWriter {
     SplitMetaInfo[] info = new SplitMetaInfo[splits.length];
     if (splits.length != 0) {
       int i = 0;
-      long offset = out.size();
+      long offset = out.getPos();
       for(org.apache.hadoop.mapred.InputSplit split: splits) {
-        int prevLen = out.size();
+        long prevLen = out.getPos();
         Text.writeString(out, split.getClass().getName());
         split.write(out);
-        int currLen = out.size();
+        long currLen = out.getPos();
         info[i++] = new JobSplit.SplitMetaInfo( 
             split.getLocations(), offset,
             split.getLength());
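
Context on the fix (not part of the committed patch): DataOutputStream.size() returns an int counter that saturates at Integer.MAX_VALUE, so split offsets derived from it go wrong once job.split grows past 2 GB, while FSDataOutputStream.getPos() returns a long and keeps counting. The standalone sketch below (a hypothetical demo class, not from the Hadoop tree) illustrates the saturation that the switch to getPos() avoids:

import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;

/**
 * Minimal sketch: DataOutputStream.size() is an int counter that saturates
 * at Integer.MAX_VALUE, whereas a long-based position (what
 * FSDataOutputStream.getPos() provides) keeps tracking correctly past 2 GB.
 */
public class SplitOffsetOverflowDemo {
  public static void main(String[] args) throws IOException {
    // Discarding sink: only the byte counters matter here, not the data.
    OutputStream nullSink = new OutputStream() {
      @Override public void write(int b) {}
      @Override public void write(byte[] b, int off, int len) {}
    };
    DataOutputStream out = new DataOutputStream(nullSink);

    byte[] chunk = new byte[1 << 20];          // 1 MiB per write
    long pos = 0;                              // long-based position, like getPos()
    long target = 3L * 1024 * 1024 * 1024;     // simulate a 3 GB job.split file
    while (pos < target) {
      out.write(chunk);
      pos += chunk.length;
    }

    System.out.println("DataOutputStream.size(): " + out.size()); // saturates at 2147483647
    System.out.println("long position:           " + pos);        // 3221225472
  }
}

With int-based offsets the SplitMetaInfo entries written for splits beyond the 2 GB mark would be wrong; tracking positions as long values, as the getPos() change above does, keeps the offsets correct for large job.split files.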