package cn.hyxy.hadoop;

import java.io.IOException;
import java.util.function.Consumer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.partition.HashPartitioner;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.util.hash.Hash;

public class Demo16_MultipleOutputsMR extends Configured implements Tool {
	/**
	 * Configures and submits the word-count job: gzip-compressed output,
	 * two reduce tasks spread by {@link HashPartitioner}.
	 *
	 * @param args args[0] = HDFS input path, args[1] = HDFS output path
	 *             (deleted first if it already exists)
	 * @return 0 when the job succeeds, 1 when it fails, -1 on bad arguments
	 * @throws Exception if HDFS access or job submission fails
	 */
	@Override
	public int run(String[] args) throws Exception {
		if (args.length != 2) {
			System.err.println("Usage: Demo16_MultipleOutputsMR <input path> <output path>");
			return -1;
		}

		Configuration config = getConf();
		// Client runs outside the cluster; point it at the remote HDFS namenode.
		config.set("fs.defaultFS", "hdfs://hadoop31:8020");

		// Submit to YARN from a non-cluster client (cross-platform submission).
		config.set("mapreduce.framework.name", "yarn");
		config.set("yarn.resourcemanager.hostname", "hadoop31");
		config.set("mapreduce.app-submission.cross-platform", "true");
		config.set("dfs.permissions", "false");

		// MapReduce refuses to run if the output directory exists; remove a stale one.
		FileSystem fs = FileSystem.get(config);
		Path outputPath = new Path(args[1]);
		if (fs.exists(outputPath)) {
			fs.delete(outputPath, true);
		}

		Job job = Job.getInstance(config, "MultipleOutputs");

		// Ship the pre-built jar explicitly (remote submission from the IDE,
		// where setJarByClass cannot locate a jar).
		job.setJar("./target/hadoop-2.7.6-0.0.1-SNAPSHOT.jar");

		job.setMapperClass(MyMapper.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(LongWritable.class);

		job.setReducerClass(MyReducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(LongWritable.class);

		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, outputPath);

		// Compress the final output files with gzip.
		FileOutputFormat.setCompressOutput(job, true);
		FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);

		// Two reducers; keys distributed by hash code.
		job.setNumReduceTasks(2);
		job.setPartitionerClass(HashPartitioner.class);

		boolean succeeded = job.waitForCompletion(true);
		return succeeded ? 0 : 1;
	}
	
	/**
	 * Entry point. Delegates to {@link ToolRunner} so generic Hadoop options
	 * (e.g. {@code -D key=value}) are parsed before {@link #run} executes.
	 */
	public static void main(String[] args) throws Exception {
		System.exit(ToolRunner.run(new Demo16_MultipleOutputsMR(), args));
	}
	
	/**
	 * Splits each input line on runs of whitespace and emits one
	 * {@code (word, 1)} pair per token. The input key (byte offset of the
	 * line) is ignored.
	 */
	public static class MyMapper extends Mapper<LongWritable, Text, Text, LongWritable> {

		// Writables are reused across map() calls — the framework serializes the
		// pair on write(), so per-record allocation is pure garbage-collector churn.
		private final Text word = new Text();
		private static final LongWritable ONE = new LongWritable(1);

		@Override
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			for (String token : value.toString().split("\\s+")) {
				word.set(token);
				context.write(word, ONE);
			}
		}
	}
	
	/**
	 * Sums the counts for each word and writes {@code (word, total)}.
	 * A {@link MultipleOutputs} instance is opened in {@link #setup} so results
	 * can optionally be routed to named side files, and is closed in
	 * {@link #cleanup} — without the close, its buffered record writers are
	 * never flushed and the side files come out truncated or empty.
	 */
	public static class MyReducer extends Reducer<Text, LongWritable, Text, LongWritable> {

		// The type parameters must match this reducer's output key/value types.
		private MultipleOutputs<Text, LongWritable> outputs;

		@Override
		protected void setup(Context context) throws IOException, InterruptedException {
			outputs = new MultipleOutputs<>(context);
		}

		@Override
		protected void reduce(Text key3, Iterable<LongWritable> value3, Context context)
				throws IOException, InterruptedException {
			long sum = 0;
			for (LongWritable count : value3) {
				sum += count.get();
			}
			context.write(key3, new LongWritable(sum));

			// Example: route results into two named outputs by count.
			// if (sum == 1) outputs.write(key3, new LongWritable(sum), "A1");
			// else outputs.write(key3, new LongWritable(sum), "A2");
		}

		@Override
		protected void cleanup(Context context) throws IOException, InterruptedException {
			// Fix: MultipleOutputs was never closed, leaking its open writers.
			if (outputs != null) {
				outputs.close();
			}
		}
	}
	
	
	/**
	 * Custom job counters: total input lines read, total words emitted, and
	 * the total character count over all words (used to derive the average
	 * word length after the job completes).
	 */
	public enum Mycounter {
		LINES,
		WORDS,
		ALL_LETTERS
	}
	
	
}
