package com.bclz.task.groupcomparing;

import java.io.IOException;
import java.math.BigDecimal;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;

import com.bclz.config.JobRun;
/**
 * 
* @ClassName: MapReduceExe  
* @Description: Finds the top-N highest-priced single orders per group
*               (求单笔订单最高价格的前N项).
* @author xuchang  
* @date 2018-11-09
 */
public class MapReduceExe {
		
	
	/**
	 * Parses each comma-separated order record and emits an {@link Order} key
	 * whose price field is computed as infos[3] * infos[4] using exact
	 * {@link BigDecimal} arithmetic (String constructor — no floating-point error).
	 *
	 * Assumed record layout (from the index usage below — TODO confirm against
	 * the data file): infos[0], infos[1], infos[2] are identity/group fields,
	 * infos[3] and infos[4] are quantity and unit price.
	 */
	public static class MapperTask extends Mapper<LongWritable, Text, Order, NullWritable>{

		// A valid record must carry at least this many comma-separated fields.
		private static final int MIN_FIELDS = 5;

		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String[] infos = value.toString().split(",");
			// Skip blank or malformed lines instead of failing the whole task
			// with ArrayIndexOutOfBoundsException.
			if (infos.length < MIN_FIELDS) {
				return;
			}
			// Total price = quantity * unit price, kept as an exact decimal string.
			String totalPrice =
					new BigDecimal(infos[3]).multiply(new BigDecimal(infos[4])).toString();
			context.write(new Order(infos[0], infos[1], totalPrice, infos[2]),
					NullWritable.get());
		}

	}
	
	/**
	 * Emits the top-N records of each group. Because of the grouping
	 * comparator, all orders of a group arrive in a single reduce() call, and
	 * the framework mutates the {@code order} key object on every step of the
	 * values iterator — so writing the key inside the loop emits distinct,
	 * sort-ordered records, not N copies of one record.
	 */
	public static class ReduceTask extends Reducer<Order, NullWritable, Order, NullWritable>{

		// Configuration key controlling how many records to keep per group.
		static final String TOP_N_KEY = "order.top.n";
		// Default preserves the previous hard-coded behavior (top 2).
		static final int DEFAULT_TOP_N = 2;

		@Override
		protected void reduce(Order order, Iterable<NullWritable> infos,
				Reducer<Order, NullWritable, Order, NullWritable>.Context context)
				throws IOException, InterruptedException {
			int topN = context.getConfiguration().getInt(TOP_N_KEY, DEFAULT_TOP_N);
			int written = 0;
			for (NullWritable nw : infos) {
				if (written >= topN) {
					// Remaining values sort lower; nothing more to emit.
					break;
				}
				context.write(order, nw);
				written++;
			}
		}

	}
	
	/**
	 * Configures and submits the top-N order job.
	 *
	 * @param args optional: args[0] = input path, args[1] = output path;
	 *             defaults to the original hard-coded local paths when absent,
	 *             so existing invocations keep working.
	 */
	public static void main(String[] args) {
		System.setProperty("HADOOP_USER_NAME", "hadoop");
		Configuration c = new Configuration();

		// Fall back to the original local test paths when no args are given.
		String input = args.length > 0 ? args[0] : "E:\\hadoop_input\\test\\order";
		String output = args.length > 1 ? args[1] : "E:\\hadoop_input\\test\\orderoutput";

		try {
			Job job = Job.getInstance(c);
			job.setJarByClass(MapReduceExe.class);
			// Partitioner: route all records of one group to the same reducer.
			job.setPartitionerClass(OrderPartioner.class);
			// Grouping comparator: one reduce() call per group despite distinct keys.
			job.setGroupingComparatorClass(OrderGroupingComparable.class);

			job.setMapperClass(MapperTask.class);
			job.setReducerClass(ReduceTask.class);

			job.setMapOutputKeyClass(Order.class);
			job.setMapOutputValueClass(NullWritable.class);

			job.setOutputKeyClass(Order.class);
			job.setOutputValueClass(NullWritable.class);

			JobRun.runJob(job, input, output, 2);

		} catch (IOException e) {
			// Fail with a nonzero exit code instead of printing the trace and
			// exiting 0, which would look like success to the caller.
			e.printStackTrace();
			System.exit(1);
		}
	}
	
}
