hadoop-common-user mailing list archives

From "Subramanian, Hema " <hema.subraman...@citi.com>
Subject Re: ClassNotFound just started with custom mapper
Date Mon, 30 Jan 2012 22:37:18 GMT
I am facing an issue when trying to run a job from Windows (through Eclipse) against my Hadoop cluster,
which runs on RHEL VMs. When I launch it via "Run on Hadoop" it works fine, but when I run it as a plain
Java application, it throws a ClassNotFoundException:

INFO: Task Id : attempt_201201101527_0037_m_000000_0, Status : FAILED
java.lang.RuntimeException: java.lang.ClassNotFoundException: TestHadoop$Map
	at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:866)
	at org.apache.hadoop.mapreduce.JobContext.getMapperClass(JobContext.java:195)
	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:718)
	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:369)
	at org.apache.hadoop.mapred.Child$4.run(Child.java:259)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:396)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1059)
	at org.apache.hadoop.mapred.Child.main(Child.java:253)
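
From searching the archives, my understanding is that when the job is launched as a plain Java
application from Eclipse, setJarByClass() finds only unpacked .class files, so no job jar gets
shipped to the task JVMs, which would explain why the cluster side cannot load TestHadoop$Map.
A minimal sketch of the workaround I have seen suggested, assuming the project is first exported
to a jar (the path below is hypothetical):

	// Hypothetical workaround: export the project to a jar and point the job
	// at it explicitly; "mapred.jar" is the Hadoop 1.x property that
	// setJarByClass() would normally fill in when a jar can be located.
	conf.set("mapred.jar", "C:/workspace/TestHadoop.jar");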

Below is the stub:

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class TestHadoop extends Configured {

	public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
		private final static IntWritable one = new IntWritable(1);
		private Text word = new Text();

		@Override
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			String line = value.toString();
			StringTokenizer tokenizer = new StringTokenizer(line);
			while (tokenizer.hasMoreTokens()) {
				word.set(tokenizer.nextToken());
				context.write(word, one);
			}
		}
	}

	public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {
		@Override
		public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
			int sum = 0;
			for (IntWritable val:values) {
				sum += val.get();
			}
			context.write(key, new IntWritable(sum));
		}
	}
	
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration(true);
		conf.set("fs.default.name","hdfs://vm-acd2-4c51:54310/");
		conf.set("mapred.job.tracker","hdfs://vm-acd2-4c51:54311/");
		conf.set("mapreduce.jobtracker.staging.root.dir","/app/hadoop/mapred/staging");
		Job jobconf = new Job(conf, "TestHadoop");
		jobconf.setJarByClass(TestHadoop.class); // resolves the jar containing this class, if one exists
		jobconf.setOutputKeyClass(Text.class);
		jobconf.setOutputValueClass(IntWritable.class);

		jobconf.setMapperClass(Map.class);
		jobconf.setCombinerClass(Reduce.class);
		jobconf.setReducerClass(Reduce.class);

		jobconf.setInputFormatClass(TextInputFormat.class);
		jobconf.setOutputFormatClass(TextOutputFormat.class);

		FileInputFormat.setInputPaths(jobconf, new Path("/tmp/Hadoop_Temp_Data/Input/"));
		FileOutputFormat.setOutputPath(jobconf, new Path("/tmp/Hadoop_Temp_Data/Output1/"));
		jobconf.waitForCompletion(true);
	}

}
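
For what it's worth, a quick diagnostic I can add at the end of main() to see whether a job jar
was actually resolved when launching from Eclipse (getJar() returns null when setJarByClass()
found nothing to ship):

	// If this prints null, no jar will be distributed to the task JVMs,
	// and the tasks cannot load TestHadoop$Map.
	System.out.println("Job jar: " + jobconf.getJar());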

Any help will be greatly appreciated!

Thanks
Hema Subramanian
