package com.shujia.mr.pinglunWC;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.dictionary.CustomDictionary;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Mapper that segments each input line with HanLP and emits (word, 1) for
 * every token found in a custom dictionary loaded from HDFS.
 *
 * Input:  (byte offset, line of comment text)
 * Output: (dictionary word, 1L) — counted downstream by the reducer.
 */
public class PingLunMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
    // Dictionary words loaded from HDFS. HashSet gives O(1) membership checks;
    // the original ArrayList made every contains() call O(n) per token.
    private Set<String> words;

    // HanLP segmenter, created once per task in setup() instead of once per
    // map() call as before.
    private Segment segment;

    // Reusable output objects — avoids allocating a fresh Text/LongWritable
    // for every emitted record (standard Hadoop Writable-reuse idiom).
    private final Text outKey = new Text();
    private static final LongWritable ONE = new LongWritable(1L);

    /*
     * Mapper.run() calls setup() exactly once before the map() loop, so this
     * is the place to load the dictionary (cidian.txt on HDFS) a single time
     * per task rather than per record.
     *
     * @throws IOException if the dictionary file cannot be opened or read
     */
    @Override
    protected void setup(Mapper<LongWritable, Text, Text, LongWritable>.Context context) throws IOException, InterruptedException {
        words = new HashSet<>();
        FileSystem fs = FileSystem.get(context.getConfiguration());
        // try-with-resources: the original never closed the stream/reader (leak).
        // UTF_8 is explicit because the platform-default charset used by the
        // bare InputStreamReader constructor can mangle Chinese text.
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(
                        fs.open(new Path("/bigdata29/data/cidian.txt")),
                        StandardCharsets.UTF_8))) {
            String word;
            while ((word = br.readLine()) != null) {
                CustomDictionary.add(word); // teach HanLP the custom term
                words.add(word);            // remember it for filtering in map()
            }
        }
        segment = HanLP.newSegment();
    }

    /**
     * Segments one line of text and emits (word, 1) for each token that
     * appears in the custom dictionary.
     *
     * @param key     byte offset of the line in the input split (unused)
     * @param value   one line of comment text
     * @param context used to emit (word, 1) pairs
     */
    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, LongWritable>.Context context) throws IOException, InterruptedException {
        List<Term> termList = segment.seg(value.toString());
        for (Term term : termList) {
            String word = term.word;
            // Only count tokens that belong to the loaded dictionary.
            if (words.contains(word)) {
                outKey.set(word);
                context.write(outKey, ONE);
            }
        }
    }
}
