package com.shujia.mr.wc2;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.dictionary.CustomDictionary;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Mapper that segments each input line with HanLP and emits (name, 1) for a
 * fixed set of watched names, feeding a downstream word-count reducer.
 *
 * <p>Input:  (byte offset, line of text). Output: (matched keyword, 1L).
 */
public class SanGuoMapper extends Mapper<LongWritable, Text, Text, LongWritable> {

    /**
     * Names to count. The same set is registered with HanLP's custom dictionary
     * (so the segmenter never splits them) and used to filter emitted terms —
     * keeping one definition prevents the two lists from drifting apart.
     */
    private static final Set<String> KEYWORDS =
            new HashSet<>(Arrays.asList("刘备", "关羽", "张飞", "数加科技", "方直"));

    /** Reused output key — standard Hadoop idiom to avoid per-record allocation. */
    private final Text outKey = new Text();

    /** Constant count value; LongWritable is only read by the framework here. */
    private static final LongWritable ONE = new LongWritable(1L);

    /** Segmenter built once per task in {@link #setup}. */
    private Segment segment;

    /**
     * One-time task initialization: register the custom dictionary entries and
     * build the segmenter. Previously this ran inside {@code map()} for every
     * input record, repeating the dictionary registration and segmenter
     * construction per line.
     */
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        for (String word : KEYWORDS) {
            CustomDictionary.add(word);
        }
        segment = HanLP.newSegment();
    }

    /**
     * Segments one input line and emits (keyword, 1) for each term that is one
     * of the watched names.
     *
     * @param key     byte offset of the line in the input split (unused)
     * @param value   one line of input text
     * @param context Hadoop context used to emit key/value pairs
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        List<Term> terms = segment.seg(value.toString());
        for (Term term : terms) {
            if (KEYWORDS.contains(term.word)) {
                outKey.set(term.word);
                context.write(outKey, ONE);
            }
        }
    }
}
