package MR_process.MR4;

import java.io.IOException;
import org.apache.hadoop.mapreduce.Reducer;
import MR_process.Bean3;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;


/**
 * Reducer that serializes each incoming {@link Bean3} as a one-line JSON object
 * and writes it as the output key (with a {@link NullWritable} value), so the
 * job's output files contain one JSON record per line.
 *
 * <p>Output shape: {@code {"word":...,"doc_id":...,"count":...,"doc_len":...,"distinct_doc_id":...}}.
 * The grouping key itself is not used; every value in the iterable is emitted as-is.
 */
public class Reduce4 extends Reducer<Text, Bean3, Text, NullWritable> {

    // Reused output key. Hadoop best practice: recycle a single Writable
    // instead of allocating a new Text per record in a hot reduce loop.
    private final Text outKey = new Text();

    /**
     * Emits one JSON line per value.
     *
     * @param key     grouping key (unused; records are written verbatim)
     * @param values  beans sharing this key, each serialized independently
     * @param context Hadoop context used to emit (jsonLine, NullWritable)
     * @throws IOException          if the underlying record writer fails
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    public void reduce(Text key, Iterable<Bean3> values, Context context)
            throws IOException, InterruptedException {
        for (Bean3 bean : values) {
            // NOTE(review): getWord()/getId() are embedded without JSON escaping;
            // a '"' or '\' in either field would produce invalid JSON — confirm
            // upstream tokenization guarantees these characters cannot appear.
            String json = "{\"word\":\"" + bean.getWord()
                    + "\",\"doc_id\":\"" + bean.getId()
                    + "\",\"count\":" + bean.getCount()
                    + ",\"doc_len\":" + bean.getLen()
                    + ",\"distinct_doc_id\":" + bean.getDistinct_id()
                    + "}";
            outKey.set(json);
            context.write(outKey, NullWritable.get());
        }
    }
}
