Subject: Re: Issue with Reduce Side join using datajoin package
From: Vikas Jadhav <vikascjadhav87@gmail.com>
To: user@hadoop.apache.org
Date: Thu, 31 Jan 2013 23:14:11 +0530

***************source ****************

// Imports reconstructed from usage (the original post omitted them). The DataJoin
// base classes are the datajoin contrib classes, apparently built from a local copy
// under com.sas.join (see the stack trace below).
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import com.sas.join.DataJoinMapperBase;
import com.sas.join.DataJoinReducerBase;
import com.sas.join.TaggedMapOutput;

public class MyJoin extends Configured implements Tool {

    public static class MapClass extends DataJoinMapperBase {

        protected Text generateInputTag(String inputFile) {
            System.out.println("Starting generateInputTag() : " + inputFile);
            String datasource = inputFile.split("-")[0];
            return new Text(datasource);
        }

        protected Text generateGroupKey(TaggedMapOutput aRecord) {
            System.out.println("Starting generateGroupKey() : " + aRecord);
            String line = ((Text) aRecord.getData()).toString();
            String[] tokens = line.split(",");
            String groupKey = tokens[0];
            return new Text(groupKey);
        }

        protected TaggedMapOutput generateTaggedMapOutput(Object value) {
            System.out.println("starting generateTaggedMapOutput() value : " + value);
            TaggedWritable retv = new TaggedWritable((Text) value);
            retv.setTag(this.inputTag);
            return retv;
        }
    }
    public static class Reduce extends DataJoinReducerBase {

        protected TaggedMapOutput combine(Object[] tags, Object[] values) {
            System.out.println("combine :");
            if (tags.length < 2) return null;
            String joinedStr = "";
            for (int i = 0; i < values.length; i++) {
                if (i > 0) joinedStr += ",";
                TaggedWritable tw = (TaggedWritable) values[i];
                String line = ((Text) tw.getData()).toString();
                String[] tokens = line.split(",", 2);
                joinedStr += tokens[1];
            }
            TaggedWritable retv = new TaggedWritable(new Text(joinedStr));
            retv.setTag((Text) tags[0]);
            return retv;
        }
    }

    public static class TaggedWritable extends TaggedMapOutput {

        private Writable data;

        // no-argument constructor, required so the framework can deserialize
        // instances; note that it leaves data == null
        public TaggedWritable() {
            this.tag = new Text();
        }

        public TaggedWritable(Writable data) {
            this.tag = new Text("");
            this.data = data;
        }

        public Writable getData() {
            return data;
        }

        public void write(DataOutput out) throws IOException {
            this.tag.write(out);
            this.data.write(out);
            System.out.println("Tag :" + tag + " Data :" + data);
        }

        /*
        public void readFields(DataInput in) throws IOException {
            System.out.println(" Starting short readFields(): " + in);
            this.tag.readFields(in);
            this.data.readFields(in);
        }
        */

        // this is MyJoin.java:125 in the stack trace below
        public void readFields(DataInput in) throws IOException {
            System.out.println(" Starting short readFields(): " + in);
            this.tag.readFields(in);
            String w = in.toString();
            if (this.data == null) {
                try {
                    this.data = (Writable) ReflectionUtils.newInstance(Class.forName(w), null);
                } catch (ClassNotFoundException e) {
                    e.printStackTrace();
                }
            }
            this.data.readFields(in);
        }
    }

    public int run(String[] args) throws Exception {
        System.out.println("Starting run() Method:");
        Configuration conf = getConf();
        conf.addResource(new Path("/home/vikas/project/hadoop-1.0.3/conf/core-site.xml"));
        conf.addResource(new Path("/home/vikas/project/hadoop-1.0.3/conf/mapred-site.xml"));
        conf.addResource(new Path("/home/vikas/project/hadoop-1.0.3/conf/hdfs-site.xml"));
        JobConf job = new JobConf(conf, MyJoin.class);
        Path in = new Path(args[0]);
        Path out = new Path(args[1]);
        FileInputFormat.setInputPaths(job, in);
        FileOutputFormat.setOutputPath(job, out);
        job.setJobName("DataJoin_cust X order");
        job.setMapperClass(MapClass.class);
        job.setReducerClass(Reduce.class);
        job.setInputFormat(TextInputFormat.class);
        job.setOutputFormat(TextOutputFormat.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(TaggedWritable.class);
        job.set("mapred.textoutputformat.separator", ",");
        JobClient.runJob(job);
        return 0;
    }

    public static void main(String[] args) throws Exception {
        System.out.println("Starting main() function:");
        int res = ToolRunner.run(new Configuration(), new MyJoin(), args);
        System.exit(res);
    }
}

*************************and error*********************************************

13/01/31 23:04:26 INFO util.NativeCodeLoader: Loaded the native-hadoop library
13/01/31 23:04:26 WARN snappy.LoadSnappy: Snappy native library not loaded
13/01/31 23:04:26 INFO mapred.FileInputFormat: Total input paths to process : 2
13/01/31 23:04:26 INFO mapred.JobClient: Running job: job_201301312254_0004
13/01/31 23:04:27 INFO mapred.JobClient:  map 0% reduce 0%
13/01/31 23:04:41 INFO mapred.JobClient:  map 66% reduce 0%
13/01/31 23:04:47 INFO mapred.JobClient:  map 100% reduce 0%
13/01/31 23:04:50 INFO mapred.JobClient:  map 100% reduce 22%
13/01/31 23:04:58 INFO mapred.JobClient: Task Id : attempt_201301312254_0004_r_000000_0, Status : FAILED
java.lang.NullPointerException
    at MyJoin$TaggedWritable.readFields(MyJoin.java:125)
    at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:67)
    at org.apache.hadoop.io.serializer.WritableSerialization$WritableDeserializer.deserialize(WritableSerialization.java:40)
    at org.apache.hadoop.mapred.Task$ValuesIterator.readNextValue(Task.java:1271)
    at org.apache.hadoop.mapred.Task$ValuesIterator.next(Task.java:1211)
    at org.apache.hadoop.mapred.ReduceTask$ReduceValuesIterator.moveToNext(ReduceTask.java:249)
    at org.apache.hadoop.mapred.ReduceTask$ReduceValuesIterator.next(ReduceTask.java:245)
    at com.sas.join.DataJoinReducerBase.regroup(DataJoinReducerBase.java:107)
    at com.sas.join.DataJoinReducerBase.reduce(DataJoinReducerBase.java:132)
    at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:519)
    at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:420)
    at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:416)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1121)
    at org.apache.hadoop.mapred.Child.main(Child.java:249)
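The NullPointerException at MyJoin.java:125 is the final this.data.readFields(in) call: DataInput.toString() does not return a Writable class name, so Class.forName(w) throws ClassNotFoundException, the catch block only prints the trace, and data is still null when it is dereferenced. Below is a minimal sketch of one common workaround, which carries the concrete class name inside the serialized record itself. It uses only the stock org.apache.hadoop.io.Text and ReflectionUtils APIs and is illustrative, not the datajoin package's own fix; both methods must be changed together since they define the wire format.

        public void write(DataOutput out) throws IOException {
            this.tag.write(out);
            // record the concrete Writable type so readFields() can rebuild it
            Text.writeString(out, this.data.getClass().getName());
            this.data.write(out);
        }

        public void readFields(DataInput in) throws IOException {
            this.tag.readFields(in);
            // recover the concrete type written above
            String dataClass = Text.readString(in);
            if (this.data == null) {
                try {
                    this.data = (Writable) ReflectionUtils.newInstance(
                            Class.forName(dataClass), null);
                } catch (ClassNotFoundException e) {
                    throw new IOException("cannot instantiate " + dataClass, e);
                }
            }
            this.data.readFields(in);
        }

With the class name in the stream, the no-argument constructor path can rebuild data before reading into it, which the in.toString() approach above cannot do.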