java · hadoop · mapreduce · inverted-index

MapReduce Inverted Index Program


Why am I not able to pass values.next() (an IntWritable object) to the files HashSet, which also holds IntWritable? (See the reducer class below.)

import java.io.IOException;
import java.util.*;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;


public class LineIndexer {

THE MAPPER CLASS

public static class LineIndexMapper extends MapReduceBase implements Mapper<LongWritable, Text, Text, IntWritable> {
    private final static Text word = new Text();
    private final static Text location = new Text();

    public void map(LongWritable key, Text val, OutputCollector<Text, IntWritable> output, Reporter reporter)
                    throws IOException {
        // get the filename where this line came from
        FileSplit fileSplit = (FileSplit)reporter.getInputSplit();
        IntWritable fileNo = new IntWritable(Integer.parseInt(fileSplit.getPath().getName()));


        String line = val.toString();
        StringTokenizer itr = new StringTokenizer(line.toLowerCase());

        while (itr.hasMoreTokens()) {
            word.set(itr.nextToken());
            output.collect(word, fileNo);
        }
    }
}

THE REDUCER CLASS

public static class LineIndexReducer extends MapReduceBase implements Reducer<Text, IntWritable, Text, Text> {
    private final static HashSet<IntWritable> files = new HashSet<IntWritable>();

    public void reduce(Text key, Iterator<IntWritable> values,
            OutputCollector<Text, Text> output, Reporter reporter)
                    throws IOException {
        files.clear();

        int count=0;
        StringBuilder toReturn = new StringBuilder();
        StringBuilder keyfreq = new StringBuilder();
        System.out.println("values"+values);
        while (values.hasNext()){
            //String filename = values.next().toString();
            System.out.println("value.next"+values.next());
            if( !(files.contains(values.next()))){  
                files.add(values.next());

                if (count!=0)
                    toReturn.append("-> ");

                count++;
                toReturn.append(values.next());
            }
        }
        IntWritable freq = new IntWritable(count);
        keyfreq.append(key.toString());
        keyfreq.append("|");
        keyfreq.append(freq);
        output.collect(new Text(keyfreq.toString()), new Text(toReturn.toString()));
    }
}

THE RUN METHOD

public static void run(String input, String output){
    JobClient client = new JobClient();
    JobConf conf = new JobConf(LineIndexer.class);

    conf.setJobName("InvertedIndex");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(conf, new Path(input));
    FileOutputFormat.setOutputPath(conf, new Path(output));

    conf.setMapperClass(LineIndexMapper.class);
    conf.setReducerClass(LineIndexReducer.class);

    client.setConf(conf);

    try {
        JobClient.runJob(conf);
    } catch (Exception e) {
        e.printStackTrace();
    }

}

THE MAIN METHOD

public static void main(String[] args) {
    if( args.length != 2 ){
        System.err.println("InvertedIndex <input_dir> <output_dir>");
    }else{
        run(args[0], args[1]);
    }
}
}

ERROR:

For lines 49 and 50 of the LineIndexReducer class:

    line 49: if( !(files.contains(values.next()))){
    line 50:     files.add(values.next());

java.util.NoSuchElementException: iterate past last value
    at org.apache.hadoop.mapred.Task$ValuesIterator.next(Task.java:121)
    at org.apache.hadoop.mapred.ReduceTask$ReduceValuesIterator.moveToNext(ReduceTask.java:250)
    at org.apache.hadoop.mapred.ReduceTask$ReduceValuesIterator.next(ReduceTask.java:246)
    at LineIndexer$LineIndexReducer.reduce(LineIndexer.java:49)
    at LineIndexer$LineIndexReducer.reduce(LineIndexer.java:1)
    at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:522)
    at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:421)
    at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:398)


Solution

  • You should call next() only once per loop iteration. Every call to values.next() advances the iterator, but hasNext() is checked only once per pass; the extra calls in the println, the contains() check, the add(), and the append() therefore consume additional values, each operate on a different element, and eventually step past the last value, raising the NoSuchElementException above. Cache the result of a single next() call in a local variable instead:

        while (values.hasNext()) {
            IntWritable filename = values.next();
            System.out.println("value.next" + filename);
            if (!files.contains(filename)) {
                files.add(filename);

                if (count != 0)
                    toReturn.append("-> ");

                count++;
                toReturn.append(filename);
            }
        }
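
One caveat on top of this fix: Hadoop's reduce-side values iterator generally reuses a single IntWritable instance, deserializing each record into the same object that next() returns. Storing that reference in the files HashSet can corrupt the set as soon as the instance is mutated by the following call. The sketch below adds a defensive copy to the corrected loop; the copy is an assumption layered on the accepted fix, not part of the original answer:

        while (values.hasNext()) {
            // Copy the value: the iterator may deserialize every record
            // into the same framework-owned IntWritable, so the reference
            // we keep in the set must be our own object.
            IntWritable filename = new IntWritable(values.next().get());

            if (!files.contains(filename)) {
                files.add(filename);

                if (count != 0)
                    toReturn.append("-> ");

                count++;
                toReturn.append(filename);
            }
        }

This works as HashSet storage because IntWritable defines value-based equals() and hashCode(), so each copied instance compares equal to any later value carrying the same file number.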