
I am trying to run a Hadoop MapReduce job but am getting the error below, and I am not sure why.

hadoop jar BWC11.jar WordCountDriver "/home/training/training_material/data/shakespeare/comedies" "/home/training/training_material/data/shakespeare/AWL"
Warning: $HADOOP_HOME is deprecated.

Exception in thread "main" java.lang.NoClassDefFoundError: WordCountDriver (wrong name: com/felix/hadoop/training/WordCountDriver)
    at java.lang.ClassLoader.defineClass1(Native Method)
    at java.lang.ClassLoader.defineClass(ClassLoader.java:791)
    at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
    at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)
    at java.net.URLClassLoader.access$100(URLClassLoader.java:71)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:423)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:410)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:356)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:264)
    at org.apache.hadoop.util.RunJar.main(RunJar.java:149)
[training@localhost BasicWordCount]$

Could someone help me out with this?

Driver code:

package com.felix.hadoop.training;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;


public class WordCountDriver extends Configured implements Tool {

    public static void main(String[] args) throws Exception
    {
        // Propagate the tool's exit status to the shell.
        System.exit(ToolRunner.run(new WordCountDriver(), args));
    }

    @Override
    public int run(String[] args) throws Exception {

        Job job = new Job(getConf(), "Basic Word Count Job");
        job.setJarByClass(WordCountDriver.class);

        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);

        job.setInputFormatClass(TextInputFormat.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        job.setNumReduceTasks(1);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // Return 0 on success, 1 on failure, so callers can detect a failed job.
        return job.waitForCompletion(true) ? 0 : 1;
    }


}

Mapper code:

package com.felix.hadoop.training;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
/**
 * 
 * @author training
 * Class : WordCountMapper
 *
 */

public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable>{
    /**
     * Optimization: instead of creating the output Writable objects inside
     * map() on every call, they could be created once and reused; see the
     * sketch after this class.
     */

    @Override
    public void map(LongWritable inputKey, Text inputVal, Context context) throws IOException, InterruptedException
    {
        // Split the line on runs of non-word characters and emit (word, 1).
        String line = inputVal.toString();
        String[] splits = line.trim().split("\\W+");
        for (String outputKey : splits)
        {
            context.write(new Text(outputKey), new IntWritable(1));
        }
    }

}
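
As an aside, here is a minimal sketch of the object-reuse optimization that the comment above alludes to. This is a common Hadoop idiom, not code from the original question; it assumes the same package and imports as the mapper above:

public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    // Reused across map() calls to avoid allocating new objects per record.
    private final Text word = new Text();
    private static final IntWritable ONE = new IntWritable(1);

    @Override
    public void map(LongWritable inputKey, Text inputVal, Context context) throws IOException, InterruptedException
    {
        for (String token : inputVal.toString().trim().split("\\W+")) {
            word.set(token);
            context.write(word, ONE);
        }
    }
}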

Reducer code:

package com.felix.hadoop.training;
import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;


public class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    @Override
    public void reduce(Text key, Iterable<IntWritable> listOfValues, Context context) throws IOException, InterruptedException
    {
        // Sum the counts emitted by the mapper for this word.
        int sum = 0;
        for (IntWritable val : listOfValues)
        {
            sum = sum + val.get();
        }
        context.write(key, new IntWritable(sum));
    }

}

I am not sure why I am getting this error. I have tried adding to the classpath, copying the class files to the directory where the .jar file resides, etc., but to no avail.


1 Answer


Add the package name "com.felix.hadoop.training" before "WordCountDriver" when launching the job. The "wrong name: com/felix/hadoop/training/WordCountDriver" part of the error means the JVM located the class file, but because the class is declared in a package, it must be invoked by its fully qualified name.
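
With the jar and paths from the question, the corrected invocation would be:

hadoop jar BWC11.jar com.felix.hadoop.training.WordCountDriver "/home/training/training_material/data/shakespeare/comedies" "/home/training/training_material/data/shakespeare/AWL"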