hadoop-user mailing list archives

From Senthil Sekar <senthil...@gmail.com>
Subject Problem in Submitting a Map-Reduce Job to Remote Hadoop Cluster
Date Sat, 01 Mar 2014 14:11:03 GMT
Hi,

 I have a remote server (CentOS 6.3) with CDH 4.0.1 installed.

 I also have a Windows 7 machine from which I am trying to submit a simple
WordCount MapReduce job (I have included the Hadoop 2.0.0 lib jars in my
Eclipse environment).

I get the exception below when I try to run it from Eclipse on my
Windows 7 machine:
//-------------------
Exception in thread "main" java.io.IOException: Cannot initialize Cluster.
Please check your configuration for mapreduce.framework.name and the
correspond server addresses.
at org.apache.hadoop.mapreduce.Cluster.initialize(Cluster.java:121)
at org.apache.hadoop.mapreduce.Cluster.<init>(Cluster.java:83)
at org.apache.hadoop.mapreduce.Cluster.<init>(Cluster.java:76)
at org.apache.hadoop.mapred.JobClient.init(JobClient.java:487)
at org.apache.hadoop.mapred.JobClient.<init>(JobClient.java:466)
at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:879)
at com.pss.WordCount.main(WordCount.java:79)

//---------------------
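
For reference, my understanding is that mapreduce.framework.name is one of the
new-style (MRv2/YARN) client properties; a minimal sketch of what I believe that
configuration normally looks like for a YARN cluster (the hostname and port
numbers below are only placeholders taken from common defaults, not values I
have verified for my cluster):

//-------------------
Configuration config = new Configuration();
// Assumed MRv2/YARN-style client settings (placeholders, not my actual setup):
config.set("fs.defaultFS", "hdfs://xyz-hostname:8020");
config.set("mapreduce.framework.name", "yarn");
config.set("yarn.resourcemanager.address", "xyz-hostname:8032");
//-------------------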

Please find the code below:

//-----------------------------------------------------
package com.pss;

import java.io.IOException;
import java.util.Iterator;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;

public class WordCount {

    // Old-style (mapred API) mapper: emits (word, 1) for every token in the line
    public static class Map extends MapReduceBase
            implements Mapper<LongWritable, Text, Text, IntWritable> {

        private final static IntWritable one = new IntWritable(1);
        private Text word = new Text();

        @Override
        public void map(LongWritable key, Text value,
                OutputCollector<Text, IntWritable> output, Reporter reporter)
                throws IOException {
            String line = value.toString();
            StringTokenizer tokenizer = new StringTokenizer(line);
            while (tokenizer.hasMoreTokens()) {
                word.set(tokenizer.nextToken());
                output.collect(word, one);
            }
        }
    }

    // Reducer: sums the counts emitted for each word
    public static class Reduce extends MapReduceBase
            implements Reducer<Text, IntWritable, Text, IntWritable> {

        @Override
        public void reduce(Text key, Iterator<IntWritable> values,
                OutputCollector<Text, IntWritable> output, Reporter reporter)
                throws IOException {
            int sum = 0;
            while (values.hasNext()) {
                sum += values.next().get();
            }
            output.collect(key, new IntWritable(sum));
        }
    }

    public static void main(String[] args) throws IOException {
        // Point the client at the remote cluster (old MRv1-style keys)
        Configuration config = new Configuration();
        config.set("fs.default.name", "hdfs://xyz-hostname:9000");
        config.set("mapred.job.tracker", "xyz-hostname:9001");

        JobConf conf = new JobConf(config);

        conf.setJarByClass(WordCount.class);
        //conf.setJar(jar);

        conf.setJobName("WordCount");
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(IntWritable.class);

        conf.setMapperClass(Map.class);
        //conf.setCombinerClass(Reduce.class);
        conf.setReducerClass(Reduce.class);
        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputFormat(TextOutputFormat.class);

        FileInputFormat.setInputPaths(conf, new Path(args[0]));
        FileOutputFormat.setOutputPath(conf, new Path(args[1]));

        JobClient.runJob(conf);
    }
}

//---------------------------------------------------------------------------------------------
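
In case it is relevant: since I am launching from Eclipse rather than with the
hadoop jar command, my understanding is that the job classes may also need to be
packaged into a jar and attached to the JobConf explicitly, along these lines
(the path is just a placeholder):

//-------------------
// Hypothetical jar built from the Eclipse project, containing the WordCount classes
conf.setJar("C:/path/to/wordcount.jar");
//-------------------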


Please help me to resolve this issue.

Regards,

Senthil
