package com.zhl.hadoop.cases.joincache;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

/**
 * @program: demos
 * @description:
 * @author: 刘振华
 * @create: 2020-10-30 18:57
 **/
public class JoinCacheMapper extends Mapper<LongWritable, Text, Text, Text> {
	// Lookup table built from the distributed-cache side file:
	// first tab-separated field of each line -> the full line.
	private Map<String, String> map;

	/**
	 * Loads the cached side file into an in-memory map before any {@code map()}
	 * call runs — the map-side (replicated) join pattern.
	 *
	 * @param context task context; supplies the cache-file URIs and configuration
	 * @throws IOException          if the cache file cannot be opened or read
	 * @throws InterruptedException if obtaining the FileSystem as user "zhl" is interrupted
	 */
	@Override
	protected void setup(Context context) throws IOException, InterruptedException {
		// 1. Locate the cache file registered on the Job (e.g. via Job#addCacheFile).
		URI[] cacheFiles = context.getCacheFiles();

		// 2. Get a FileSystem for the cache file's URI, acting as user "zhl".
		FileSystem fs = FileSystem.get(cacheFiles[0], context.getConfiguration(), "zhl");

		map = new HashMap<>();
		// 3-4. Read the file line by line into the map. try-with-resources guarantees
		// both streams are closed even if a read throws (the original leaked them on
		// error). UTF-8 is stated explicitly rather than relying on the platform charset.
		try (FSDataInputStream dis = fs.open(new Path(cacheFiles[0]));
			 BufferedReader br = new BufferedReader(
					 new InputStreamReader(dis, StandardCharsets.UTF_8))) {
			String line;
			while ((line = br.readLine()) != null) {
				// Key on the first tab-separated field; keep the whole line as the value.
				String[] strs = line.split("\t");
				map.put(strs[0], line);
			}
		}
	}

	/**
	 * Join step placeholder: intended to look up the incoming record's join key in
	 * {@code map} and emit the joined record. Not yet implemented in this source.
	 */
	@Override
	protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

	}
}
