2 votes
import java.io.IOException;
import java.util.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
public class CommonFriends {
    public static class TokenizerMapper
            extends Mapper<Object, Text, Text, IntWritable> {

        private IntWritable friend = new IntWritable();
        private Text friends = new Text();

        // Each input line is a person id followed by that person's friend ids.
        // For every friend of the person, build the pair key "a-b" (smaller id
        // first, so both sides of a friendship produce the same key) and emit
        // the person's remaining friends as candidate common friends.
        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            StringTokenizer itr = new StringTokenizer(value.toString(), "\n");
            while (itr.hasMoreTokens()) {
                String[] line = itr.nextToken().split(" ");
                if (line.length > 2) {
                    int person = Integer.parseInt(line[0]);
                    for (int i = 1; i < line.length; i++) {
                        int ifriend = Integer.parseInt(line[i]);
                        friends.set(person < ifriend ? person + "-" + ifriend
                                                     : ifriend + "-" + person);
                        for (int j = 1; j < line.length; j++) {
                            if (i != j) {
                                friend.set(Integer.parseInt(line[j]));
                                context.write(friends, friend);
                            }
                        }
                    }
                }
            }
        }
    }

    public static class IntSumReducer extends Reducer<Text, IntWritable, Text, Text> {

        private Text result = new Text();

        // CommonFriends.java:38 in the stack trace corresponds to this method's
        // entry point (via the compiler-generated bridge into it).
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            // Hadoop reuses the same IntWritable instance while iterating,
            // so store the primitive int value, not the Writable object.
            HashSet<Integer> seen = new HashSet<>();
            ArrayList<Integer> common = new ArrayList<>();
            for (IntWritable val : values) {
                if (seen.contains(val.get()))
                    common.add(val.get());   // seen twice: a common friend of the pair
                else
                    seen.add(val.get());
            }
            result.set(common.toString());
            // CommonFriends.java:51 in the stack trace: when this class also runs
            // as the combiner, this write emits a Text value where the framework
            // expects the map output value class, IntWritable.
            context.write(key, result);
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "Common Friends");
        job.setJarByClass(CommonFriends.class);
        job.setMapperClass(TokenizerMapper.class);
        job.setCombinerClass(IntSumReducer.class);   // the reducer is reused as the combiner here
        job.setReducerClass(IntSumReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

Error: java.io.IOException: wrong value class: class org.apache.hadoop.io.Text is not class org.apache.hadoop.io.IntWritable
	at org.apache.hadoop.mapred.IFile$Writer.append(IFile.java:194)
	at org.apache.hadoop.mapred.Task$CombineOutputCollector.collect(Task.java:1350)
	at org.apache.hadoop.mapred.Task$NewCombinerRunner$OutputConverter.write(Task.java:1667)
	at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
	at org.apache.hadoop.mapreduce.lib.reduce.WrappedReducer$Context.write(WrappedReducer.java:105)
	at CommonFriends$IntSumReducer.reduce(CommonFriends.java:51)
	at CommonFriends$IntSumReducer.reduce(CommonFriends.java:38)
	at org.apache.hadoop.mapreduce.Reducer.run(Reducer.java:171)
	at org.apache.hadoop.mapred.Task$NewCombinerRunner.combine(Task.java:1688)
	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1637)
	at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1489)
	at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
	at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:793)
	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
	at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
	at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)

This is my code, and the error message is above. Any idea? I think the problem is in the configuration of the output classes of the mapper and the reducer. The input files are lists of numbers in a file. More details can be provided if needed. The program finds the common friends between pairs of friends.
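From the mapper code, the input format appears to be one person per line: the person's id followed by that person's friend ids, space-separated. A small made-up example:

    1 2 3 4
    2 1 3
    3 1 2 4
    4 1 3

For the pair key "1-2" the mapper emits person 1's other friends (3, 4) and person 2's other friend (3); the reducer then keeps the ids that arrive twice, so it would output 1-2 [3].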

Please can you add comments in your code to identify the erroring lines (51 and 38)? – Ben Watson

2 Answers

0 votes

Removing job.setCombinerClass(IntSumReducer.class); from your code should solve this problem.
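For reference, here is a minimal sketch of the driver with that line removed (otherwise the same main() as in the question):

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "Common Friends");
        job.setJarByClass(CommonFriends.class);
        job.setMapperClass(TokenizerMapper.class);
        // no setCombinerClass(...) call: IntSumReducer writes Text values,
        // which would violate the declared map output value class (IntWritable)
        job.setReducerClass(IntSumReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

Without a combiner, the map output (Text, IntWritable) flows straight to the reducer, which is then free to emit (Text, Text).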

0 votes

Just had a look at your code; it seems you are using your reducer code as the combiner.

One thing you need to check: your combiner takes its input in the form <Text, IntWritable>, but the output of the combiner would be in <Text, Text> form.

The input to your reducer would then arrive as <Text, Text>, but you declared the reducer's input (the map output types) as <Text, IntWritable>. A combiner's output must match those declared map output types, because it is written back into the intermediate map output files, so the job throws the error as soon as the combiner writes a Text value.

Two things can be done:

1) You might consider changing the output type of the reducer.

2) You might consider writing a separate combiner (a sketch follows).
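For option 2, here is a minimal sketch of what a separate combiner could look like (the class name PassThroughCombiner is introduced here for illustration; it is not from the question). Its output types match the map output types, <Text, IntWritable>, which is exactly the contract the failing combiner violates. Note that for this particular algorithm a combiner must not deduplicate values, because the reducer detects a common friend by seeing the same id twice, so this one only forwards values unchanged:

    public static class PassThroughCombiner
            extends Reducer<Text, IntWritable, Text, IntWritable> {
        // A combiner's output key/value types must match the map output
        // types, here (Text, IntWritable); this one forwards every value.
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            for (IntWritable val : values) {
                context.write(key, val);
            }
        }
    }

Wire it in with job.setCombinerClass(PassThroughCombiner.class);. Since it performs no aggregation it saves no work, which is why simply removing the combiner (the other answer) is the practical fix here.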