package cn.pengpeng.day09.join;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

import javax.swing.text.AbstractDocument.Content;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Map-side join (no reducer needed).
 *
 * The small table is read once in {@code setup()} via the Hadoop HDFS
 * {@code FileSystem} API and cached in memory; each record of the large
 * table is then joined against that cache in {@code map()}.
 */
public class JoinMR {

	/**
	 * Mapper that performs the map-side join: caches the small table
	 * (loaded from HDFS in setup) and joins each incoming rating record
	 * against it, emitting a populated {@link JoinBean}.
	 */
	public static class MapTask extends Mapper<LongWritable, Text, JoinBean, NullWritable>{
		// Small-table cache: join key -> full small-table line ("::"-delimited).
		Map<String, String> map = new HashMap<>();

		@Override
		protected void setup(Context context)
				throws IOException, InterruptedException {
			Configuration conf = context.getConfiguration();
			// HDFS path of the small table, passed in by the driver via conf.
			String smallTableName = conf.get("smallTableName");
			FileSystem fs = FileSystem.get(conf);
			// try-with-resources: the original leaked the stream/reader.
			// UTF-8 is specified explicitly so the result does not depend on
			// the platform default charset.
			try (BufferedReader br = new BufferedReader(new InputStreamReader(
					fs.open(new Path(smallTableName)), StandardCharsets.UTF_8))) {
				String line;
				while ((line = br.readLine()) != null) {
					String[] fields = line.split("::");
					// fields[0] is the join key; keep the whole line as the value.
					map.put(fields[0], line);
				}
			}
		}

		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			// Rating data, e.g.: 1::1193::5::978300760
			String[] split = value.toString().split("::");
			String smallLine = map.get(split[0]);
			if (smallLine == null) {
				// No matching key in the small table. The original code threw
				// a NullPointerException here and killed the task; skip the
				// record instead (inner-join semantics).
				return;
			}
			String[] line = smallLine.split("::");
			JoinBean joinBean = new JoinBean();
			joinBean.set(split[0], line[2], line[1], split[1], split[2], "rating");
			context.write(joinBean, NullWritable.get());
		}
	}


	/**
	 * Driver: configures and submits the join job.
	 *
	 * args[0] = input directory (large table, e.g. "/ratings.dat")
	 * args[1] = output directory (must not exist yet)
	 * args[2] = HDFS path of the small table
	 */
	public static void main(String[] args) throws Exception{
		// Without this, the local (e.g. Windows) OS user name would be used
		// for HDFS access; alternatively chmod 777 the HDFS directories.
		System.setProperty("HADOOP_USER_NAME", "root");
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://hadoop01:9000");
		// Hand the small-table path to the mappers through the configuration.
		conf.set("smallTableName", args[2]);
		Job job = Job.getInstance(conf, "join");

		// Map-only job: set the mapper and the jar to ship.
		job.setMapperClass(MapTask.class);
		job.setJarByClass(JoinMR.class);

		// Output key/value types (no reducer, so these are the map outputs).
		job.setOutputKeyClass(JoinBean.class);
		job.setOutputValueClass(NullWritable.class);

		// Input and output paths.
		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1]));

		// Submit and wait.
		boolean completion = job.waitForCompletion(true);
		System.out.println(completion?"你很优秀！！！":"滚去调bug！！");

	}

}
