package cn.edu.cqu.fredyvia.Predict;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;

/**
 * @Author: FredyVia
 * @Date: 2020/11/19 23:24
 */
public class PredictMapper extends Mapper<Object, Text, Text, IntWritable> {
    // Stopwords loaded from the first distributed-cache file.
    private Set<String> stopwords;
    // Stop punctuation loaded from the second distributed-cache file.
    private Set<String> stopPunc;

    /**
     * Loads the stopword and stop-punctuation sets from the first two files
     * registered in the job's distributed cache.
     *
     * @param context the mapper context supplying the cache file URIs and configuration
     * @throws IOException           if a cache file cannot be opened or read
     * @throws IllegalStateException if fewer than two cache files are registered
     */
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        URI[] cacheFiles = context.getCacheFiles();
        // Fail fast with a clear message instead of the NPE/AIOOBE the
        // unchecked cacheFile[0]/[1] access would otherwise produce.
        if (cacheFiles == null || cacheFiles.length < 2) {
            throw new IllegalStateException(
                    "Expected 2 cache files (stopwords, stop punctuation), found "
                            + (cacheFiles == null ? 0 : cacheFiles.length));
        }
        FileSystem fs = FileSystem.get(context.getConfiguration());
        stopwords = readLines(fs, new Path(cacheFiles[0]));
        stopPunc = readLines(fs, new Path(cacheFiles[1]));
    }

    /**
     * Reads every line of the given file into a set.
     *
     * <p>Uses try-with-resources so the stream is always closed (the original
     * leaked both streams), and {@link BufferedReader#readLine()} instead of
     * the deprecated {@code DataInputStream.readLine()} — the original comment
     * noted {@code readUTF()} threw {@code EOFException}; buffered line reads
     * avoid both that and the fragile {@code available() > 0} loop, which only
     * reports bytes readable without blocking and can terminate early.
     * UTF-8 decoding is assumed for the cache files — TODO confirm encoding.
     *
     * @param fs   the filesystem holding the file
     * @param path the file to read
     * @return the set of lines in the file
     * @throws IOException if the file cannot be opened or read
     */
    private static Set<String> readLines(FileSystem fs, Path path) throws IOException {
        Set<String> lines = new HashSet<>();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(fs.open(path), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                lines.add(line);
            }
        }
        return lines;
    }

    /**
     * Not yet implemented — the body is intentionally empty in SOURCE.
     * Presumably meant to tokenize {@code value}, drop tokens present in
     * {@link #stopwords}/{@link #stopPunc}, and emit (token, 1) pairs —
     * TODO confirm intended behavior before implementing.
     */
    @Override
    protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {


    }
}
