package com.shujia.mr.fenci;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.dictionary.CustomDictionary;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Mapper that segments each input line with HanLP and emits {@code (word, 1)}
 * for every token found in a custom dictionary stored on HDFS.
 *
 * <p>Output: key = matched custom word, value = count of 1; a downstream
 * reducer is expected to sum the counts per word.
 */
public class PingLunMapper extends Mapper<LongWritable, Text, Text, LongWritable> {

    // Custom words loaded from HDFS. HashSet gives O(1) membership checks in
    // map(); the original ArrayList made each lookup O(n).
    private Set<String> customWords;

    // One segmenter per task. Building it inside map() (per record) is
    // needlessly expensive; HanLP's Segment can be reused across calls.
    private Segment segment;

    /**
     * Runs once per map task (before any map() call, not before each one):
     * loads the custom word list from HDFS, registers every word with HanLP's
     * custom dictionary, and prepares the segmenter.
     *
     * @param context task context, used to obtain the HDFS configuration
     * @throws IOException if the dictionary file cannot be opened or read
     */
    @Override
    protected void setup(Mapper<LongWritable, Text, Text, LongWritable>.Context context) throws IOException, InterruptedException {
        customWords = new HashSet<>();
        segment = HanLP.newSegment();

        FileSystem fs = FileSystem.get(context.getConfiguration());
        // try-with-resources closes both the HDFS stream and the reader; the
        // original leaked them. UTF-8 is pinned so decoding the word list does
        // not depend on the JVM's platform default charset.
        try (BufferedReader br = new BufferedReader(new InputStreamReader(
                fs.open(new Path("/bigdata27/words/words.txt")), StandardCharsets.UTF_8))) {
            String line;
            while ((line = br.readLine()) != null) {
                // Teach HanLP the word so the segmenter keeps it intact...
                CustomDictionary.add(line);
                // ...and remember it so map() can filter for it.
                customWords.add(line);
            }
        }
    }

    /**
     * Segments one input line and writes {@code (word, 1)} for each token that
     * belongs to the custom dictionary loaded in {@link #setup}.
     *
     * @param key   byte offset of the line within the split (unused)
     * @param value one line of input text
     * @throws IOException          if the write to the output context fails
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, LongWritable>.Context context) throws IOException, InterruptedException {
        List<Term> words = segment.seg(value.toString());
        // A plain loop (rather than a stream with a try/catch inside the
        // lambda) lets the checked exceptions declared by this method
        // propagate; the original swallowed write failures with
        // printStackTrace(), silently losing records.
        for (Term term : words) {
            if (customWords.contains(term.word)) {
                context.write(new Text(term.word), new LongWritable(1L));
            }
        }
    }
}
