package psnl.bingo.mr.demo3;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import psnl.bingo.mr.demo1.D1Line;

/**
 * Mapper that keeps only input records whose first field (text before the
 * first {@code ';'}) appears in a filter file. The filter file's HDFS path is
 * supplied via the {@code "filterFile"} configuration property and is loaded
 * once per task in {@link #setup}. Matching records are emitted unchanged as
 * the key, with a {@link NullWritable} value.
 */
public class D3Mapper extends Mapper<LongWritable, Text, Text, NullWritable> {

    // Reused output key so we don't allocate a new Text per record.
    private final Text outKey = new Text();

    // Cities to keep; populated in setup(). Instance field (was static) so
    // state cannot leak between mapper instances when the task JVM is reused.
    private final Set<String> filterCity = new HashSet<>();

    /**
     * Loads the filter file (one city per line, UTF-8) into the in-memory
     * set consulted by {@link #map}.
     *
     * @param context task context supplying the job configuration
     * @throws IOException if the "filterFile" property is unset or the file
     *                     cannot be opened/read
     * @throws InterruptedException declared by the Mapper contract
     */
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        Configuration conf = context.getConfiguration();
        String filterFile = conf.get("filterFile");
        if (StringUtils.isEmpty(filterFile)) {
            // Fail fast with a clear message instead of an NPE in new Path(null).
            throw new IOException("Required configuration property 'filterFile' is not set");
        }

        // Obtain the file system and open the filter file.
        FileSystem fileSystem = FileSystem.get(conf);

        // try-with-resources closes the reader (and the wrapped HDFS stream)
        // even when reading fails part-way through.
        try (FSDataInputStream fsdInput = fileSystem.open(new Path(filterFile));
             BufferedReader bfReader =
                     new BufferedReader(new InputStreamReader(fsdInput, StandardCharsets.UTF_8))) {

            // Read to end-of-file. The previous loop condition
            // (isNotEmpty(readLine())) stopped at the first blank line,
            // silently dropping every filter entry after it.
            String line;
            while ((line = bfReader.readLine()) != null) {
                if (StringUtils.isNotEmpty(line)) {
                    filterCity.add(line);
                }
            }
        }
    }

    /**
     * Emits the record unchanged when its first ';'-separated field is in the
     * filter set; drops it otherwise.
     *
     * @param key byte offset of the line within the input split (unused)
     * @param value one ';'-delimited input record
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // split(";") always yields at least one element, so item[0] is safe
        // even for an empty input line.
        String[] item = value.toString().split(";");

        if (filterCity.contains(item[0])) {
            outKey.set(value);
            context.write(outKey, NullWritable.get());
        }
    }
}
