package cn.lsh.mapper;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;

import java.io.IOException;
import java.io.StringReader;

/**
 * Mapper for the TF (term-frequency) stage of TF-IDF over hot comments, which
 * also counts the total number of comments N. It emits three kinds of
 * key/value pairs for the reducer:
 *   1. per-comment word occurrence:   "word_id" -> 1
 *   2. per-comment total word count:  "id"      -> 1
 *   3. global hot-comment total:      "count"   -> 1
 * Sample input record: key = "5824431", value = "我大剑三不负基三盛名，听的我都醉了".
 */
public class HotCommentMapper extends Mapper<Text, Text, Text, IntWritable> {
	// Reused output key — avoids allocating a fresh Text per emitted pair.
	private final Text wordKey = new Text();
	// Sentinel key under which the reducer tallies the total comment count N.
	public static final Text counter = new Text("count");
	private final IntWritable one = new IntWritable(1);

	@Override
	protected void map(Text key, Text value, Context context) throws IOException, InterruptedException {
		// Tokenize the comment text with IK Analyzer in smart-segmentation mode.
		IKSegmenter segmenter = new IKSegmenter(new StringReader(value.toString()), true);
		for (Lexeme token = segmenter.next(); token != null; token = segmenter.next()) {
			wordKey.set(token.getLexemeText() + "_" + key);
			// Kind 1: one occurrence of this word within this comment.
			context.write(wordKey, one);
			// Kind 2: one token toward this comment's total word count.
			context.write(key, one);
		}
		// Kind 3: one record toward the global hot-comment total N.
		context.write(counter, one);
	}
}
