package cn.pengpeng.day07.topn3;

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.codehaus.jackson.map.ObjectMapper;

public class TopN3 {
	/**
	 * Parses one JSON line into a {@link MovieBean} and emits it as the map output key
	 * (value is {@link NullWritable}); sorting/grouping of the beans is handled by the
	 * job's partitioner and grouping comparator configured in {@code main}.
	 */
	public static class MapTask extends Mapper<LongWritable, Text, MovieBean, NullWritable>{
		// ObjectMapper is thread-safe and expensive to build; share one instance
		// instead of allocating a new one per input record.
		private static final ObjectMapper MAPPER = new ObjectMapper();

		@Override
		protected void map(LongWritable key, Text value,
				Mapper<LongWritable, Text, MovieBean, NullWritable>.Context context)
				throws IOException, InterruptedException {
			try {
				MovieBean bean = MAPPER.readValue(value.toString(), MovieBean.class);
				context.write(bean, NullWritable.get());
			} catch (Exception e) {
				// Best-effort parsing: skip malformed JSON lines rather than failing
				// the task, but make the data loss observable via a job counter.
				context.getCounter("TopN3", "MALFORMED_RECORDS").increment(1);
			}
		}
	}
	
	/**
	 * Emits the top-N records of each group.
	 *
	 * <p>Because the job uses a grouping comparator ({@code MyGroup}), one {@code reduce}
	 * call covers a whole group (e.g. one movie id) with its records already sorted by the
	 * key's ordering. Although the values are all {@link NullWritable}, advancing the value
	 * iterator also advances the current key, so {@code key} reflects each record in turn.
	 *
	 * <p>N is read from the job configuration key {@code topn.size} (default 20, which
	 * preserves the original hard-coded behavior).
	 */
	public static class ReduceTask extends Reducer<MovieBean, NullWritable, MovieBean, NullWritable>{
		// How many records to emit per group; resolved once per task in setup().
		private int topN = 20;

		@Override
		protected void setup(Reducer<MovieBean, NullWritable, MovieBean, NullWritable>.Context context) {
			topN = context.getConfiguration().getInt("topn.size", 20);
		}

		@Override
		protected void reduce(MovieBean key, Iterable<NullWritable> values,
				Reducer<MovieBean, NullWritable, MovieBean, NullWritable>.Context context)
				throws IOException, InterruptedException {
			int num = 0;
			// Iterating the (empty) values advances the key to the next record of the group.
			for (NullWritable nullWritable : values) {
				if (num >= topN) {
					break;
				}
				num++;
				context.write(key, NullWritable.get());
			}
		}
	}
	
	public static void main(String[] args) throws Exception{
		//声明使用哪个用户提交的
				System.setProperty("HADOOP_USER_NAME", "root");
				
				
				Configuration conf = new Configuration();
				conf.set("fs.defaultFS", "hdfs://bigdata01:9000"); //设置hdfs集群在哪里
				conf.set("mapreduce.framework.name", "yarn"); //提交到哪里   yarn   local
				conf.set("yarn.resourcemanager.hostname", "bigdata01");  //resourcemeanger 在哪里
				
				conf.set("mapreduce.app-submission.cross-platform", "true"); //windows 提交任务到linux上需要设置的参数
		
		//Configuration conf = new Configuration();
		
		Job job = Job.getInstance(conf, "topn3");
		
		//设置map和reduce，以及提交的jar
		job.setMapperClass(MapTask.class);
		job.setReducerClass(ReduceTask.class);
		//job.setJarByClass(TopN3.class);
		job.setJar("C:\\Users\\root\\Desktop\\wc.jar");
		//job.setNumReduceTasks(2);
		job.setPartitionerClass(MyPartition.class);
		job.setGroupingComparatorClass(MyGroup.class);
		
		
		//设置输入输出类型
		job.setMapOutputKeyClass(MovieBean.class);
		job.setMapOutputValueClass(NullWritable.class);
		
		job.setOutputKeyClass(MovieBean.class);
		job.setOutputValueClass(NullWritable.class);
		int numReduceTasks = job.getNumReduceTasks();
		System.err.println(numReduceTasks);
		//输入和输出目录
		FileInputFormat.addInputPath(job, new Path("hdfs://bigdata01:9000/rating2.json"));
		FileOutputFormat.setOutputPath(job, new Path("hdfs://bigdata01:9000/out/json/"));
		
		//判断文件是否存在
		/*File file = new File("d:\\data\\out\\topN3");
		if(file.exists()){
			FileUtils.deleteDirectory(file);
		}*/
		
		//提交任务
		boolean completion = job.waitForCompletion(true);
		System.out.println(completion?"你很优秀！！！":"滚去调bug！！");
	}

}
