package cn.hyxy.hadoop;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Map-side (replicated) join MapReduce job.
 *
 * <p>The small table (student id -&gt; name) is shipped to every mapper via the
 * distributed cache and loaded into an in-memory map in {@link JoinMapper#setup};
 * the large table (scores) is streamed through {@link JoinMapper#map} and joined
 * against that map, so no reduce-side shuffle of join keys is needed.
 *
 * <p>Arguments: {@code <input dir> <cache file (small table)> <output dir>}.
 */
public class Demo21_MapJoinMR extends Configured implements Tool {

	@Override
	public int run(String[] args) throws Exception {
		// Three arguments are required: input path, cached small-table file, output path.
		if (args.length != 3) {
			System.out.println("usage : <input> <cacheFile> <output>");
			return -1;
		}
		Configuration config = getConf();

		// Submit from a local (e.g. Windows) machine to a remote YARN cluster:
		// read input from HDFS and enable cross-platform submission.
		config.set("fs.defaultFS", "hdfs://hadoop31:8020");
		config.set("mapreduce.framework.name", "yarn");
		config.set("yarn.resourcemanager.hostname", "hadoop31");
		config.set("mapreduce.app-submission.cross-platform", "true");
//		config.set("dfs.permissions", "false");

		// Remove a pre-existing output directory so the job does not fail on submit.
		FileSystem fs = FileSystem.get(config);
		Path path = new Path(args[2]);
		if (fs.exists(path)) {
			fs.delete(path, true);
		}

		Job job = Job.getInstance(config, "MapJoin");
		// job.setJarByClass(getClass());
		// Explicit jar path is needed when submitting from outside the cluster.
		job.setJar("./target/hadoop-2.7.6-0.0.1-SNAPSHOT.jar");

		job.setMapperClass(JoinMapper.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(NullWritable.class);

		// Ship the small table to every task via the distributed cache.
		job.addCacheFile(new URI(args[1])); // URI.create("...")

		// Identity reducer: the join is complete on the map side; the reducer
		// only merges/sorts the mapper output.
		job.setReducerClass(Reducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(NullWritable.class);

		FileInputFormat.addInputPath(job, new Path(args[0]));
		// FileInputFormat.addInputPath(job, new Path("file///D:/a/5"));
		FileOutputFormat.setOutputPath(job, path);

		return job.waitForCompletion(true) ? 0 : 1;
	}

	public static void main(String[] args) throws Exception {
		int code = ToolRunner.run(new Demo21_MapJoinMR(), args);
		System.exit(code);
	}

	/**
	 * Mapper that joins each score record against the cached student table.
	 *
	 * <p>Input value format (score table, comma-separated): fields where index 1
	 * and 2 are score columns and index 3 is the student id — TODO confirm the
	 * exact schema against the actual input files.
	 */
	public static class JoinMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
		// In-memory copy of the small table: student id -> student name.
		private Map<String, String> map = new HashMap<>();

		@Override
		public void setup(Context context) throws IOException, InterruptedException {
			URI[] uris = context.getCacheFiles();
			for (URI u : uris) {
				System.out.println(">缓存的文件是>" + u.getPath());
			}
			// Read the first cached file; the framework symlinks it into the
			// task's working directory under its base file name.
			if (uris.length > 0) {
				String path = uris[0].getPath();
				String fileName = path.substring(path.lastIndexOf("/") + 1);
				File file = new File(fileName);
				if (file.exists()) {
					System.out.println("文件存在，文件名是：" + fileName);
					// try-with-resources closes the reader even on exceptions
					// (the original leaked it); UTF-8 is explicit instead of
					// the platform default charset.
					try (BufferedReader br = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
						String line;
						while ((line = br.readLine()) != null) {
							String[] strs = line.split(",");
							// Skip blank/malformed lines instead of throwing
							// ArrayIndexOutOfBoundsException.
							if (strs.length >= 2) {
								map.put(strs[0], strs[1]);
							}
						}
					}
				}
			}
		}

		private Text key2 = new Text();

		@Override
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			// Parse one record of the score table.
			String[] strs = value.toString().split(",");
			// Guard against short records, then emit only matching rows (inner join).
			if (strs.length > 3 && map.containsKey(strs[3])) {
				String name = map.get(strs[3]); // student name from the cached table
				key2.set(strs[3] + "\t" + name + "\t" + strs[1] + "\t" + strs[2]);
				context.write(key2, NullWritable.get());
			}
		}
	}
}
