package com.demo.yarn.mr2;

import java.io.IOException;
import java.util.StringTokenizer;

import com.demo.util.RandomUtil;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import static org.apache.hadoop.mapreduce.lib.input.FileInputFormat.PATHFILTER_CLASS;
import static org.apache.hadoop.mapreduce.lib.input.FileInputFormat.setInputPathFilter;


/**
 * MapReduce job that computes each student's average score.
 *
 * <p>Input: whitespace-separated {@code name score} token pairs (one or more
 * pairs per line). Output: {@code name <tab> averageScore} with the average
 * truncated to an int.
 */
public class MapReduceCaseAvg extends Configured implements Tool {

	/**
	 * Mapper: tokenizes each input line into (name, score) pairs and emits
	 * one (Text name, IntWritable score) record per pair.
	 */
	public static class AvgMapper extends Mapper<Object, Text, Text, IntWritable> {

		// Reused output writables — standard Hadoop idiom to avoid
		// allocating two objects per emitted record.
		private final Text name = new Text();
		private final IntWritable score = new IntWritable();

		@Override
		protected void map(Object key, Text value, Context context)
				throws IOException, InterruptedException {
			StringTokenizer st = new StringTokenizer(value.toString());
			// Consume tokens pairwise. Guard the second nextToken(): the
			// original looped on hasMoreElements() and would throw
			// NoSuchElementException on a line with an odd token count.
			while (st.hasMoreTokens()) {
				String strName = st.nextToken();	// student name
				if (!st.hasMoreTokens()) {
					break;	// trailing name with no score — skip malformed tail
				}
				String strScore = st.nextToken();	// student score
				try {
					score.set(Integer.parseInt(strScore));
				} catch (NumberFormatException e) {
					// Skip non-numeric scores instead of failing the task.
					continue;
				}
				name.set(strName);
				context.write(name, score);
			}
		}
	}

	/**
	 * Reducer: averages all scores for one student, e.g.
	 * {@code <张三, {98, 89, 79}>} → {@code (张三, 88)}.
	 * The average is truncated to an int to match the IntWritable output type.
	 */
	public static class AvgReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

		private final IntWritable avg = new IntWritable();

		@Override
		protected void reduce(Text key, Iterable<IntWritable> values,
				Context context) throws IOException, InterruptedException {
			int sum = 0;	// total of all scores for this student
			int count = 0;	// number of subjects
			for (IntWritable score : values) {
				sum += score.get();
				count++;
			}
			// Hadoop never calls reduce() with an empty values iterable, but
			// guard the division anyway rather than risk ArithmeticException.
			if (count > 0) {
				avg.set(sum / count);
				context.write(key, avg);
			}
		}
	}

	/**
	 * Configures and submits the job, then waits for completion.
	 *
	 * @param args args[0] = input path (read recursively),
	 *             args[1] = output path prefix; a timestamp/random suffix from
	 *             RandomUtil.nowstr() is appended so reruns do not collide
	 * @return 0 on success, 1 on failure or bad usage
	 */
	@Override
	public int run(String[] args) throws Exception {
		// Fail fast with a usage message instead of an
		// ArrayIndexOutOfBoundsException deep inside job setup.
		if (args.length < 2) {
			System.err.println("Usage: MapReduceCaseAvg <input path> <output path>");
			return 1;
		}

		Job job = Job.getInstance(getConf(), "avg mr");
		job.setJarByClass(MapReduceCaseAvg.class);

		/* Mapper and reducer classes. */
		job.setMapperClass(AvgMapper.class);
		job.setReducerClass(AvgReducer.class);

		/* Output key/value types (shared by map and reduce output here). */
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(IntWritable.class);

		/* Input/output paths; descend into input subdirectories. */
		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileInputFormat.setInputDirRecursive(job, true);
		FileOutputFormat.setOutputPath(job, new Path(args[1] + RandomUtil.nowstr()));

		/* Submit to the cluster and block until the job finishes. */
		return job.waitForCompletion(true) ? 0 : 1;
	}

	public static void main(String[] args) throws Exception {
		System.exit(ToolRunner.run(new MapReduceCaseAvg(), args));
	}
}
