package cn.pengpeng.day05.join;

import java.io.IOException;
import java.util.ArrayList;

import org.apache.commons.beanutils.BeanUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class ReduceJoin {
	
	public static class ReduceJoinMapper extends Mapper<LongWritable,Text,Text,JoinBean>{
		/** Name of the file backing this input split; decides which table a line belongs to. */
		String fileName;
		/** Reused output key — avoids allocating a new Text per record (Hadoop idiom). */
		private final Text outKey = new Text();

		@Override
		protected void setup(Mapper<LongWritable, Text, Text, JoinBean>.Context context)
				throws IOException, InterruptedException {
			// Each map task reads exactly one split, so the file name is fixed for the task's lifetime.
			FileSplit inputSplit = (FileSplit) context.getInputSplit();
			fileName = inputSplit.getPath().getName();
		}

		/**
		 * Tags each CSV line with its source table and emits it keyed by uid.
		 * Order lines (file name starts with "order"): orderId,uid — user columns get placeholders.
		 * User lines: uid,uname,age,lover — order column gets a placeholder.
		 * NOTE(review): assumes well-formed CSV; a short line throws
		 * ArrayIndexOutOfBoundsException, same as the original behavior.
		 */
		@Override
		protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, JoinBean>.Context context)
				throws IOException, InterruptedException {

			String[] fields = value.toString().split(",");
			JoinBean bean = new JoinBean();

			if (fileName.startsWith("order")) {
				// order file: fields[0]=orderId, fields[1]=uid
				bean.set(fields[0], fields[1], "NULL", -1, "NULL", "t_order");
			} else {
				// user file: fields[0]=uid, fields[1]=uname, fields[2]=age, fields[3]=lover
				bean.set("NULL", fields[0], fields[1], Integer.parseInt(fields[2]), fields[3], "t_user");
			}

			outKey.set(bean.getUid());
			context.write(outKey, bean);
		}
	}
	
	public static class ReduceJoinReducer extends Reducer<Text, JoinBean, JoinBean, NullWritable>{

		/**
		 * Joins all order records for a uid with that uid's single user record.
		 * <p>
		 * Hadoop recycles the value object while iterating over {@code beans}, so each
		 * order bean must be deep-copied before being stored for the join.
		 * If no "t_user" record arrives for the uid, the user columns keep the default
		 * values of a freshly constructed JoinBean (left-join-like behavior).
		 */
		@Override
		protected void reduce(Text uid, Iterable<JoinBean> beans,
				Reducer<Text, JoinBean, JoinBean, NullWritable>.Context context) throws IOException, InterruptedException {
			ArrayList<JoinBean> orderBeans = new ArrayList<>();
			JoinBean userBean = new JoinBean();

			// Separate the two tables' records for this uid.
			for (JoinBean b : beans) {
				if (b.getTableName().equals("t_order")) {
					// Copy the fields out: the framework reuses 'b' on the next iteration.
					JoinBean newBean = new JoinBean();
					newBean.set(b.getOderId(), b.getUid(), b.getUname(), b.getAge(), b.getLover(), b.getTableName());
					orderBeans.add(newBean);
				} else {
					// At most one user record per uid is expected; last one wins if there are duplicates.
					userBean.set(b.getOderId(), b.getUid(), b.getUname(), b.getAge(), b.getLover(), b.getTableName());
				}
			}

			// Stitch the user's attributes onto every order record and emit.
			for (JoinBean ob : orderBeans) {
				ob.setUname(userBean.getUname());
				ob.setAge(userBean.getAge());
				ob.setLover(userBean.getLover());
				context.write(ob, NullWritable.get());
			}
		}

	}
	
	
	
	/**
	 * Drives the reduce-side join job.
	 * <p>
	 * {@code args[0]} = input path containing the order/user files,
	 * {@code args[1]} = output directory. A pre-existing output directory is
	 * deleted first so reruns do not fail (FileOutputFormat refuses to overwrite).
	 */
	public static void main(String[] args) throws Exception {
		// Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
		if (args.length < 2) {
			System.err.println("Usage: ReduceJoin <input path> <output path>");
			System.exit(2);
		}

		Configuration conf = new Configuration(); // auto-loads Hadoop config files found on the runtime classpath

		Job job = Job.getInstance(conf);

		job.setJarByClass(ReduceJoin.class);

		job.setMapperClass(ReduceJoinMapper.class);
		job.setReducerClass(ReduceJoinReducer.class);

		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(JoinBean.class);

		job.setOutputKeyClass(JoinBean.class);
		job.setOutputValueClass(NullWritable.class);

		// Remove any stale output from a previous run.
		Path outputPath = new Path(args[1]);
		FileSystem fs = FileSystem.get(conf);
		if (fs.exists(outputPath)) {
			fs.delete(outputPath, true);
		}

		FileInputFormat.setInputPaths(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, outputPath);

		job.setNumReduceTasks(1);

		boolean res = job.waitForCompletion(true);
		System.exit(res ? 0 : 1);
	}
	

	
	
	
	
	

}
