package com.nightsoul.hadoop1.test.sort;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Partitioner;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import com.nightsoul.hadoop1.test.IntPair;
import com.nightsoul.hadoop1.test.OldJobBuilder;
import com.nightsoul.hadoop1.test.junit.NcdcRecordParser;

//TODO: job execution was reported broken — check KeyComparator: returning 0 for differing years breaks the primary sort order
@SuppressWarnings("rawtypes")
public class MaxTemperatureUsingSecondarySort extends Configured implements	Tool {

	/**
	 * Mapper that emits a composite (year, temperature) key with a
	 * {@link NullWritable} value for every valid NCDC record. The framework's
	 * shuffle sort (see {@link KeyComparator}) then orders temperatures within
	 * each year, so the reducer sees the maximum first.
	 */
	static class MaxTemperatureMapper extends MapReduceBase
		implements Mapper<LongWritable, Text, IntPair, NullWritable> {

		// Reused across map() calls; the old mapred API runs map() serially
		// within one task, so a single parser instance is safe here.
		private NcdcRecordParser parser = new NcdcRecordParser();

		@Override
		public void map(LongWritable key, Text value,
				OutputCollector<IntPair, NullWritable> output, Reporter reporter)
				throws IOException {

			parser.parse(value);
			if(parser.isValidTemperature()) {
				// All information lives in the key; the value is a placeholder.
				output.collect(new IntPair(parser.getYearInt(), parser.getAirTemperature()),
						NullWritable.get());
			}
		}

	}

	/**
	 * Reducer that simply emits the first key of each group. Because keys are
	 * grouped by year ({@link GroupComparator}) and sorted with temperature
	 * descending ({@link KeyComparator}), the first key in a group carries the
	 * maximum temperature for that year.
	 */
	static class MaxTemperatureReducer extends MapReduceBase
		implements Reducer<IntPair, NullWritable, IntPair, NullWritable> {

		@Override
		public void reduce(IntPair key, Iterator<NullWritable> values,
				OutputCollector<IntPair, NullWritable> output, Reporter reporter)
				throws IOException {
			// Emit only the group's first (i.e. maximum-temperature) key.
			output.collect(key, NullWritable.get());
		}

	}

	/**
	 * Partitions records by year only, so every record for a given year lands
	 * on the same reducer regardless of its temperature.
	 */
	public static class FirstPartitioner implements Partitioner<IntPair, NullWritable> {

		@Override
		public void configure(JobConf job) {
			// No configuration needed.
		}

		@Override
		public int getPartition(IntPair key, NullWritable value, int numPartitions) {
			// Mask with Integer.MAX_VALUE instead of Math.abs(): if the
			// multiplication overflows to Integer.MIN_VALUE, Math.abs() returns
			// a negative number, which would yield an illegal partition index.
			return (key.getFirst() * 127 & Integer.MAX_VALUE) % numPartitions;
		}
	}

	/**
	 * Sort comparator: ascending by year (first), then descending by
	 * temperature (second), so the maximum temperature of each year arrives
	 * first at the reducer.
	 */
	public static class KeyComparator extends WritableComparator {

		protected KeyComparator() {
			super(IntPair.class, true);
		}

		@Override
		public int compare(WritableComparable a, WritableComparable b) {
			IntPair ip1 = (IntPair) a;
			IntPair ip2 = (IntPair) b;
			int cmp = IntPair.compare(ip1.getFirst(), ip2.getFirst());
			if(cmp!=0) {
				// BUG FIX: previously "return 0", which made all years compare
				// equal and broke the primary sort order entirely.
				return cmp;
			}
			// Negate to sort temperatures in descending order within a year.
			return -IntPair.compare(ip1.getSecond(), ip2.getSecond());
		}
	}

	// Register KeyComparator as the default comparator for IntPair.
	// NOTE(review): conf.setOutputKeyComparatorClass() below already sets the
	// sort comparator for this job; this global registration also affects any
	// other use of IntPair in the same JVM — confirm that is intended.
	static {
		WritableComparator.define(IntPair.class, new KeyComparator());
	}

	/**
	 * Grouping comparator: compares years only, so all records of one year —
	 * regardless of temperature — form a single reduce group.
	 */
	public static class GroupComparator extends WritableComparator {

		public GroupComparator() {
			super(IntPair.class, true);
		}

		@Override
		public int compare(WritableComparable a, WritableComparable b) {
			IntPair ip1 = (IntPair) a;
			IntPair ip2 = (IntPair) b;
			return IntPair.compare(ip1.getFirst(), ip2.getFirst());
		}
	}

	/**
	 * Wires the secondary-sort job together: custom partitioner (by year),
	 * sort comparator (year asc, temperature desc), and grouping comparator
	 * (by year), then runs the job synchronously.
	 *
	 * @param args input and output paths, parsed by OldJobBuilder
	 * @return 0 on success, -1 if the arguments could not be parsed
	 */
	@Override
	public int run(String[] args) throws Exception {
		JobConf conf = OldJobBuilder.parseInputAndOutput(this, getConf(), args);
		if(conf==null) {
			return -1;
		}

		conf.setMapperClass(MaxTemperatureMapper.class);
		conf.setPartitionerClass(FirstPartitioner.class);
		conf.setOutputKeyComparatorClass(KeyComparator.class);
		conf.setOutputValueGroupingComparator(GroupComparator.class);

		conf.setReducerClass(MaxTemperatureReducer.class);
		conf.setOutputKeyClass(IntPair.class);
		conf.setOutputValueClass(NullWritable.class);

		JobClient.runJob(conf);

		return 0;
	}

	public static void main(String[] args) throws Exception {
		int exitCode = ToolRunner.run(new MaxTemperatureUsingSecondarySort(), args);
		System.exit(exitCode);
	}
}
