package recoverLinkGraph;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

import removeDanglingPage.ReadBytes;

import mapred.AbstractReducer;
import mapred.util.BytesBuilder;

/**
 * Reducer that reassembles the link graph: for each key, it concatenates the
 * URL bytes extracted from every grouped value into a single output record.
 */
public class RecoverLinkGraph_Reducer extends AbstractReducer<BytesWritable, BytesWritable, BytesWritable, BytesWritable>{

	/**
	 * Appends the URL portion of each value (as decoded by {@link ReadBytes#nextUrl()})
	 * into one {@code BytesBuilder} buffer, then emits the combined bytes under the
	 * original key.
	 *
	 * @param key      the grouping key, passed through unchanged to the output
	 * @param values   all value records grouped under {@code key}
	 * @param output   collector receiving the single combined record for this key
	 * @param reporter unused progress reporter
	 * @throws IOException if emitting the output record fails
	 */
	@Override
	public void reduce(BytesWritable key, Iterator<BytesWritable> values,
			OutputCollector<BytesWritable, BytesWritable> output,
			Reporter reporter) throws IOException {
		BytesBuilder link = new BytesBuilder();
		while (values.hasNext()) {
			// NOTE(review): ReadBytes appears to wrap a serialized record and
			// nextUrl() to yield its URL bytes — confirm against ReadBytes' contract.
			ReadBytes record = new ReadBytes(values.next());
			link.append(record.nextUrl());
		}
		// close() presumably finalizes and returns the accumulated byte array.
		output.collect(key, new BytesWritable(link.close()));
	}

}
