package cn.hyxy.hadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.DataOutput;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

/**
 * Finds the name(s) with the highest count.
 *
 * Input (the output of Demo02_WordCountMR.java):
 *   Jack 3, Json 2, Mary 1, Mike 1, Rose 3
 *
 * The Mapper re-emits each name/count pair unchanged; the Reducer then emits
 * only the name(s) tied for the maximum count — for the sample input above,
 * Jack 3 and Rose 3.
 */
public class Demo04_MaxCountMR2 extends Configured implements Tool {
	/**
	 * Configures and submits the max-count job.
	 *
	 * @param args {@code args[0]} = input path, {@code args[1]} = output path
	 * @return 0 on success, non-zero on failure or bad usage
	 */
	@Override
	public int run(String[] args) throws Exception {
		// Expect exactly two arguments: <input path> <output path>.
		if (args.length != 2) {
			System.out.println("Usage ...");
			return -1;
		}

		Configuration conf = getConf();

		// Remove a pre-existing output directory so the job can be re-run.
		Path outputDir = new Path(args[1]);
		FileSystem fileSystem = FileSystem.get(conf);
		if (fileSystem.exists(outputDir)) {
			fileSystem.delete(outputDir, true);
		}

		Job job = Job.getInstance(conf, "取最大值");
		job.setJarByClass(getClass());
		System.out.println("------class:" + getClass().getName());

		// Mapper emits <name, count>.
		job.setMapperClass(MyMapper.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(LongWritable.class);

		// Reducer keeps only the name(s) with the highest count.
		job.setReducerClass(MyReducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(LongWritable.class);

		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, outputDir);

		return job.waitForCompletion(true) ? 0 : 1;
	}

	/** Entry point: delegates to ToolRunner so generic options (-D, -files, …) are parsed. */
	public static void main(String[] args) throws Exception {
		System.exit(ToolRunner.run(new Demo04_MaxCountMR2(), args));
	}

	/**
	 * Re-emits each {@code <name, count>} pair from lines such as {@code "Jack 3"}
	 * (name, whitespace, count). Blank or malformed lines are skipped instead of
	 * failing the whole task, which the original code did via
	 * ArrayIndexOutOfBoundsException / NumberFormatException.
	 */
	public static class MyMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
		// Reused output objects — standard Hadoop idiom to avoid allocating a
		// new Text/LongWritable pair for every input record.
		private final Text outKey = new Text();
		private final LongWritable outValue = new LongWritable();

		@Override
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			// Trim first: leading whitespace would otherwise produce an empty
			// first token from split("\\s+").
			String line = value.toString().trim();
			if (line.isEmpty()) {
				return; // skip blank lines
			}
			String[] fields = line.split("\\s+");
			if (fields.length < 2) {
				return; // malformed line: no count column
			}
			long count;
			try {
				count = Long.parseLong(fields[1]);
			} catch (NumberFormatException ignored) {
				return; // malformed line: count is not numeric
			}
			outKey.set(fields[0]);
			outValue.set(count);
			context.write(outKey, outValue);
		}
	}

	/**
	 * Tracks the running maximum count across all reduce() calls and, in
	 * cleanup(), emits every name tied for that maximum.
	 *
	 * Fixes over the original:
	 * - The original also kept a {@code HashMap<Text, LongWritable>} keyed by the
	 *   incoming {@code key3} object. Hadoop REUSES the key instance between
	 *   reduce() calls, so every map entry aliased one mutating Text — a classic
	 *   object-reuse bug. That map was never used for output; it is removed.
	 * - Debug println and commented-out code removed.
	 * - Only the key strings are stored (the counts all equal {@code max}).
	 */
	public static class MyReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
		// Highest count seen so far.
		private long max = 0;
		// Names currently tied for the maximum. Keys are copied to String
		// because the Text key object is reused by the framework.
		private final Set<String> leaders = new HashSet<>();

		@Override
		protected void reduce(Text key3, Iterable<LongWritable> value3,
				Reducer<Text, LongWritable, Text, LongWritable>.Context context)
				throws IOException, InterruptedException {
			// Input is already aggregated (normally one count per name), but
			// tolerate multiple values by taking the largest for this key.
			Iterator<LongWritable> it = value3.iterator();
			long count = it.next().get();
			while (it.hasNext()) {
				count = Math.max(count, it.next().get());
			}

			if (count > max) {
				// New maximum: previous leaders are no longer the best.
				max = count;
				leaders.clear();
				leaders.add(key3.toString());
			} else if (count == max) {
				// Tie with the current maximum.
				leaders.add(key3.toString());
			}
		}

		@Override
		protected void cleanup(Reducer<Text, LongWritable, Text, LongWritable>.Context context)
				throws IOException, InterruptedException {
			// Emit every name that ties for the maximum count.
			for (String name : leaders) {
				context.write(new Text(name), new LongWritable(max));
			}
		}
	}

}