//package cn.hyxy.hadoop;
//
//import org.apache.hadoop.conf.Configuration;
//import org.apache.hadoop.conf.Configured;
//import org.apache.hadoop.fs.FileSystem;
//import org.apache.hadoop.fs.Path;
//import org.apache.hadoop.io.LongWritable;
//import org.apache.hadoop.io.Text;
//import org.apache.hadoop.io.Writable;
//import org.apache.hadoop.mapreduce.Job;
//import org.apache.hadoop.mapreduce.Mapper;
//import org.apache.hadoop.mapreduce.Reducer;
//import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
//import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
//import org.apache.hadoop.util.Tool;
//import org.apache.hadoop.util.ToolRunner;
//
//import java.io.DataInput;
//import java.io.DataOutput;
//import java.io.IOException;
//import java.util.Iterator;
//
///** Original input data (one "name<TAB>count" pair per line):
// * Jack	3
//	Json	2
//	Mary	1
//	Mike	1
//	Rose	3
// */
//
///**
//* After the Mapper runs (per the plan: key = count, value = name):
//*  {1,[Mary,Mike]}
//*  {2,[Json]}
//*  {3,[Rose,Jack]}
//*
//* The Reducer then runs over these groups.
//* NOTE(review): MyMapper below does not implement this plan — it splits each
//* line on whitespace and emits (token, 1) for every token (a plain word
//* count), so the numeric count fields are emitted as keys too. Confirm
//* which behavior was intended before re-enabling this class.
//*/
//public class Demo04_MaxCountMR2 extends Configured implements Tool {
//    @Override
//    public int run(String[] args) throws Exception {
//        if (args.length != 2) {
//            System.out.println("usage ...");
//            return -1;
//        }
//        // Step 7: declare the job configuration
//        Configuration config = getConf();
//        FileSystem fs = FileSystem.get(config);
//        Path path = new Path(args[1]);
//        if (fs.exists(path)) {
//            fs.delete(path, true);
//        }
//        // Step 8: create the Job
//        Job job = Job.getInstance(config, "取最大值");
//        // NOTE: for local-mode runs, core-site.xml and mapred-site.xml must be removed from the classpath.
//        job.setJarByClass(getClass());
//        
//        job.setMapperClass(MyMapper.class);
//        job.setMapOutputKeyClass(Text.class);
//        job.setMapOutputValueClass(LongWritable.class);
//        //
//        job.setReducerClass(MyReducer.class);
//        job.setOutputKeyClass(Text.class);
//        job.setOutputValueClass(LongWritable.class);
//        //
//        FileInputFormat.addInputPath(job, new Path(args[0]));
//        FileOutputFormat.setOutputPath(job, path);
//        return job.waitForCompletion(true) ? 0 : 1;
//    }
//
//    public static void main(String[] args) throws Exception {
//        int code = ToolRunner.run(new Demo04_MaxCountMR2(), args);
//        System.exit(code);
//    }
//    
//  
//
//    // Step 1: the Mapper class (emits (token, 1) for every whitespace-separated token)
//    public static class MyMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
//        @Override
//        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
//         
//            String[] strs = value.toString().split("\\s+");
//            for (String string : strs) {
//				context.write(new Text(string), new LongWritable(1));
//			}
//        }
//    }
//
//    public static class MyReducer extends Reducer<Text, LongWritable, Text, LongWritable>{
//    	private long max=0;
//    	private Text str=null;
//    	
//    	@Override
//    	protected void reduce(Text key3, Iterable<LongWritable> value3,
//    			Reducer<Text, LongWritable, Text, LongWritable>.Context arg2) throws IOException, InterruptedException {
//    		long sum=0;
//    		for (LongWritable ii : value3) {
//				sum+=ii.get();
//			}
//    		if (sum>max) {
//    			max=sum;
//    			// NOTE(review): Hadoop reuses the same Text key object across
//    			// reduce() calls, so storing the bare reference (str=key3) would
//    			// leave str pointing at the LAST key by cleanup(); a defensive
//    			// copy is required to remember the max key.
//    			str=new Text(key3);
//			}       		
//    	}
//    	
//    	@Override
//    	protected void cleanup(Reducer<Text, LongWritable, Text, LongWritable>.Context context)
//    			throws IOException, InterruptedException {
//    		context.write(str,new LongWritable(max));
//    	}
//    }
//
//    
//
//}
//
//
//
