package cn.hyxy.hadoop;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class Demo22_ReducerJoinMR extends Configured implements Tool {

	/**
	 * Configures and submits the reducer-side join job.
	 *
	 * @param args args[0] = student input path (3-field records), args[1] = score
	 *             input path (4-field records), args[2] = output directory
	 * @return 0 if the job succeeds, 1 if it fails, -1 on bad arguments
	 * @throws Exception if HDFS access or job submission fails
	 */
	@Override
	public int run(String[] args) throws Exception {
		if (args.length != 3) {
			// Fixed: old message ("usage : in out...") omitted the second input path.
			System.err.println("usage : <student-in> <score-in> <out>");
			return -1;
		}
		Configuration config = getConf();

		// Cluster settings so the job can be submitted "cross-platform"
		// from a local client (e.g. Windows IDE) to the YARN cluster.
		config.set("fs.defaultFS", "hdfs://hadoop31:8020"); // run locally, read input from HDFS
		config.set("mapreduce.framework.name", "yarn");
		config.set("yarn.resourcemanager.hostname", "hadoop31");
		config.set("mapreduce.app-submission.cross-platform", "true");
		config.set("dfs.permissions", "false");

		// Delete a stale output directory up front; otherwise the job fails on startup.
		FileSystem fs = FileSystem.get(config);
		Path path = new Path(args[2]);
		if (fs.exists(path)) {
			fs.delete(path, true);
		}

		Job job = Job.getInstance(config, "ReducerJoin");
//		job.setJarByClass(getClass());
		// Explicit jar path is required for cross-platform submission from an IDE.
		job.setJar("./target/hadoop-2.7.6-0.0.1-SNAPSHOT.jar");

		job.setMapperClass(MyMapper.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(ResultBean.class);

		job.setReducerClass(JoinReducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(NullWritable.class);

		// Both join sides feed the same mapper; it tells them apart by field count.
		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileInputFormat.addInputPath(job, new Path(args[1]));
		FileOutputFormat.setOutputPath(job, path);

		return job.waitForCompletion(true) ? 0 : 1;
	}

	/** CLI entry point: runs the tool through ToolRunner and exits with its status code. */
	public static void main(String[] args) throws Exception {
		System.exit(ToolRunner.run(new Demo22_ReducerJoinMR(), args));
	}

	/**
	 * Tags each input record with its source table so the reducer can join them.
	 *
	 * Student records have 3 fields, e.g. "S001,Jack,23"        -> key S001, flag 0.
	 * Score records have 4 fields,  e.g. "C001,Chinese,83,S001" -> key S001, flag 1.
	 */
	public static class MyMapper extends Mapper<LongWritable, Text, Text, ResultBean> {

		// Reused output objects to avoid per-record allocations; safe because
		// the framework serializes them during context.write().
		private final Text outKey = new Text();
		private ResultBean resultbean;

		@Override
		protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, ResultBean>.Context context)
				throws IOException, InterruptedException {
			String[] strs = value.toString().split(",");
			if (strs.length == 3) {
				// Student record: the join key is the first field.
				resultbean = new ResultBean(strs[1], strs[2], 0);
				outKey.set(strs[0]);
			} else if (strs.length == 4) {
				// Score record: the join key is the last field.
				resultbean = new ResultBean(strs[1], strs[2], 1);
				outKey.set(strs[3]);
			} else {
				// Fixed: skip blank/malformed lines instead of throwing
				// ArrayIndexOutOfBoundsException on strs[1]/strs[3].
				return;
			}
			context.write(outKey, resultbean);
		}
	}

	/**
	 * Joins the single student record (flag 0) for a student id with every score
	 * record (flag 1) sharing that id, emitting one joined line per score.
	 * Keys with no student record produce no output (inner join).
	 */
	public static class JoinReducer extends Reducer<Text, ResultBean, Text, NullWritable> {

		// Reused output key; the framework serializes it on write, so mutation is safe.
		private Text key4 = new Text();

		@Override
		protected void reduce(Text key, Iterable<ResultBean> value,
				Reducer<Text, ResultBean, Text, NullWritable>.Context context) throws IOException, InterruptedException {
			String student = null;
			List<String> scoreLines = new ArrayList<>();

			// Hadoop reuses the same bean instance across iterations,
			// so copy the fields out as strings immediately.
			for (ResultBean bean : value) {
				String line = bean.getKey() + "\t" + bean.getDatas();
				if (bean.getFlag() == 0) {
					student = line; // at most one student record per key
				} else {
					scoreLines.add(line);
				}
			}

			// No matching student side: emit nothing for this key.
			if (student == null) {
				return;
			}
			for (String score : scoreLines) {
				key4.set(student + "\t" + score);
				context.write(key4, NullWritable.get());
			}
		}
	}
	
	/**
	 * Join-side value carrier serialized between map and reduce.
	 *
	 * For a student record (flag 0): key = name, datas = age.
	 * For a score record (flag 1):  key = course id, datas = score.
	 * Fields must be non-null before serialization (writeUTF rejects null).
	 */
	public static class ResultBean implements Writable {
		private String key;    // name (flag 0) or course id (flag 1)
		private String datas;  // age (flag 0) or score (flag 1)
		private int flag;      // 0 = student table, 1 = score table

		/** No-arg constructor required by the Writable deserialization contract. */
		public ResultBean() {

		}

		public ResultBean(String key, String datas, int flag) {
			this.key = key;
			this.datas = datas;
			this.flag = flag;
		}

		// readFields/write must read and write the fields in the same order.
		@Override
		public void readFields(DataInput in) throws IOException {
			key = in.readUTF();
			datas = in.readUTF();
			flag = in.readInt();
		}

		@Override
		public void write(DataOutput out) throws IOException {
			out.writeUTF(key);
			out.writeUTF(datas);
			out.writeInt(flag);
		}

		public String getKey() {
			return key;
		}

		public void setKey(String key) {
			this.key = key;
		}

		public String getDatas() {
			return datas;
		}

		public void setDatas(String datas) {
			this.datas = datas;
		}

		public int getFlag() {
			return flag;
		}

		public void setFlag(int flag) {
			this.flag = flag;
		}

		/** Added for debuggability; also makes the bean usable as direct text output. */
		@Override
		public String toString() {
			return key + "\t" + datas + "\t" + flag;
		}
	}
}
