package cn.hyxy.hadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Iterator;

/** Data flow:
 *  Input (the output of Demo02_WordCountMR.java):
 *      Jack    3
 *      Json    2
 *      Mary    1
 *      Mike    1
 *      Rose    3
 *
 *  After the Mapper (pass-through of word/count pairs):
 *      Jack    3
 *      Json    2
 *      Mary    1
 *      Mike    1
 *      Rose    3
 *
 *  After the Reducer (only the entry with the maximum count survives):
 *      Rose    3
 */
public class Demo04_MaxCountMR extends Configured implements Tool {

    /**
     * Configures and submits the "max count" job: deletes any pre-existing
     * output directory, wires up the mapper/reducer, and blocks until the
     * job finishes.
     *
     * @param args args[0] = input path, args[1] = output path
     * @return 0 on success, 1 on job failure, -1 on bad usage
     * @throws Exception if filesystem access or job submission fails
     */
    @Override
    public int run(String[] args) throws Exception {
        if (args.length != 2) {
            System.out.println("usage: Demo04_MaxCountMR <input path> <output path>");
            return -1;
        }
        Configuration config = getConf();
        // Remove a pre-existing output directory; otherwise the job fails
        // immediately with FileAlreadyExistsException.
        FileSystem fs = FileSystem.get(config);
        Path outputPath = new Path(args[1]);
        if (fs.exists(outputPath)) {
            fs.delete(outputPath, true);
        }

        Job job = Job.getInstance(config, "取最大值");
        // Runs in local mode when mapreduce.framework.name=local — the default
        // when core-site.xml / mapred-site.xml are absent from the classpath.
        job.setJarByClass(getClass());
        System.out.println("------class:" + getClass().getName());

        job.setMapperClass(MyMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);

        job.setReducerClass(MyReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, outputPath);
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        int code = ToolRunner.run(new Demo04_MaxCountMR(), args);
        System.exit(code);
    }

    /**
     * Parses lines of the form {@code word<whitespace>count} and emits
     * (word, count). Blank or malformed lines are skipped instead of
     * killing the task.
     */
    public static class MyMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
        // Reuse output objects across map() calls — standard Hadoop idiom
        // to avoid one allocation per input record.
        private final Text word = new Text();
        private final LongWritable count = new LongWritable();

        @Override
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String[] strs = value.toString().split("\\s+");
            // BUG FIX: the original indexed strs[0]/strs[1] unconditionally
            // and threw ArrayIndexOutOfBounds / NumberFormatException on
            // blank or malformed lines, failing the whole task.
            if (strs.length < 2 || strs[0].isEmpty()) {
                return;
            }
            try {
                count.set(Long.parseLong(strs[1]));
            } catch (NumberFormatException ignored) {
                return; // skip lines whose count field is not a long
            }
            word.set(strs[0]);
            context.write(word, count);
        }
    }

    /**
     * Tracks the (word, count) pair with the highest count seen across all
     * groups and emits the single winner in {@link #cleanup}.
     *
     * <p>Correct only with a single reducer (the job default), since the
     * global maximum is held in instance state.
     */
    public static class MyReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
        private long max = 0;
        private Text maxKey = null;

        @Override
        protected void reduce(Text key3, Iterable<LongWritable> value3,
                Reducer<Text, LongWritable, Text, LongWritable>.Context context)
                throws IOException, InterruptedException {
            if (key3.toString().isEmpty()) {
                return; // ignore empty-word artifacts from dirty input
            }
            // BUG FIX: the original read only the first value of the group;
            // with a combiner or multiple input files a key can carry several
            // partial counts, so every value must be considered.
            for (LongWritable v : value3) {
                long count = v.get();
                if (count > max) {
                    max = count;
                    // BUG FIX: Hadoop reuses the key object between reduce()
                    // calls, so storing the reference (original: str = key3)
                    // left it pointing at the LAST key processed, not the
                    // max one. Copy the bytes instead.
                    maxKey = new Text(key3);
                }
            }
        }

        @Override
        protected void cleanup(Reducer<Text, LongWritable, Text, LongWritable>.Context context)
                throws IOException, InterruptedException {
            // BUG FIX: on empty input the original wrote a null key, which
            // NPEs inside the output format. Emit nothing instead.
            if (maxKey != null) {
                context.write(maxKey, new LongWritable(max));
            }
        }
    }

}