From: Apache Wiki
To: Apache Wiki
Reply-To: common-dev@hadoop.apache.org
Date: Sun, 28 Nov 2010 18:22:19 -0000
Message-ID: <20101128182219.77592.49799@eosnew.apache.org>
Subject: [Hadoop Wiki] Update of "WordCount" by RobinWenglewski

Dear Wiki user,

You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change notification.

The "WordCount" page has been changed by RobinWenglewski.
The comment on this change is: example updated to new API.
http://wiki.apache.org/hadoop/WordCount?action=diff&rev1=12&rev2=13

--------------------------------------------------

  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.conf.*;
  import org.apache.hadoop.io.*;
- import org.apache.hadoop.mapred.*;
+ import org.apache.hadoop.mapreduce.*;
- import org.apache.hadoop.util.*;
+ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+ import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+ import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

  public class WordCount {

-   public static class Map extends MapReduceBase implements Mapper<LongWritable, Text, Text, IntWritable> {
+   public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
      private final static IntWritable one = new IntWritable(1);
      private Text word = new Text();

-     public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
+     public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        StringTokenizer tokenizer = new StringTokenizer(line);
        while (tokenizer.hasMoreTokens()) {
          word.set(tokenizer.nextToken());
-         output.collect(word, one);
+         context.write(word, one);
        }
      }
    }

-   public static class Reduce extends MapReduceBase implements Reducer<Text, IntWritable, Text, IntWritable> {
+   public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {

-     public void reduce(Text key, Iterator<IntWritable> values, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
+     public void reduce(Text key, Iterator<IntWritable> values, Context context)
+         throws IOException, InterruptedException {
        int sum = 0;
        while (values.hasNext()) {
          sum += values.next().get();
        }
-       output.collect(key, new IntWritable(sum));
+       context.write(key, new IntWritable(sum));
      }
    }

    public static void main(String[] args) throws Exception {
-     JobConf conf = new JobConf(WordCount.class);
-     conf.setJobName("wordcount");
+     Configuration conf = new Configuration();
+
+     Job job = new Job(conf, "wordcount");
+
+     job.setOutputKeyClass(Text.class);
+     job.setOutputValueClass(IntWritable.class);
+
+     job.setMapperClass(Map.class);
+     job.setReducerClass(Reduce.class);

+     job.setInputFormatClass(TextInputFormat.class);
+     job.setOutputFormatClass(TextOutputFormat.class);
-     conf.setOutputKeyClass(Text.class);
-     conf.setOutputValueClass(IntWritable.class);
-
-     conf.setMapperClass(Map.class);
-     conf.setCombinerClass(Reduce.class);
-     conf.setReducerClass(Reduce.class);

-     conf.setInputFormat(TextInputFormat.class);
-     conf.setOutputFormat(TextOutputFormat.class);
+     FileInputFormat.addInputPath(job, new Path(args[0]));
+     FileOutputFormat.setOutputPath(job, new Path(args[1]));

+     job.waitForCompletion(true);
-     FileInputFormat.setInputPaths(conf, new Path(args[0]));
-     FileOutputFormat.setOutputPath(conf, new Path(args[1]));
-
-     JobClient.runJob(conf);
    }

  }
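For reference, below is the updated example reassembled as a single compilable file, a minimal sketch assuming the new org.apache.hadoop.mapreduce API of Hadoop 0.20.x. One deliberate fix relative to revision 13 above: in the new API, Reducer.reduce() takes an Iterable<IntWritable>, not an Iterator<IntWritable>; with Iterator the method does not override the base class's reduce(), and Hadoop silently falls back to the default identity reduce. The java.io/java.util imports, the @Override annotations, the setJarByClass() and setCombinerClass() calls, and the exit-code handling are additions not shown in the diff.

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class WordCount {

  public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();

    @Override
    public void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
      // Tokenize the input line and emit (word, 1) for every token.
      StringTokenizer tokenizer = new StringTokenizer(value.toString());
      while (tokenizer.hasMoreTokens()) {
        word.set(tokenizer.nextToken());
        context.write(word, one);
      }
    }
  }

  public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {
    @Override
    public void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
      // Iterable, not Iterator: otherwise this method would not override
      // Reducer.reduce() and the identity reduce would run instead.
      int sum = 0;
      for (IntWritable value : values) {
        sum += value.get();
      }
      context.write(key, new IntWritable(sum));
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = new Job(conf, "wordcount");
    // Not in the diff: lets Hadoop locate and ship the jar containing this class.
    job.setJarByClass(WordCount.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    job.setMapperClass(Map.class);
    // The old-API version also registered the reducer as a combiner; that is
    // still valid here because Reduce's input and output types match.
    job.setCombinerClass(Reduce.class);
    job.setReducerClass(Reduce.class);

    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    // Submit the job, block until it finishes, and exit non-zero on failure.
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}

Assuming the class is packaged into a jar (the jar name and HDFS paths here are placeholders), the job can then be submitted with:

  hadoop jar wordcount.jar WordCount /user/hadoop/input /user/hadoop/output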