package com.wsx.demo;

import java.io.IOException;

import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import com.wsx.gradeanaly.Student;

public class TestMapper extends Mapper<LongWritable, Text, Text, Text> {

	/*
	 * Streams float values (one per line) and, per map task, tracks the running
	 * global max/min plus a per-input-file sum and record count. In cleanup()
	 * it emits "max"/"min" and an "aver" record of the form "sum:count" so the
	 * reducer can combine partial averages.
	 */

	// Running extremes, seeded with the infinities so the first parsed value
	// always replaces them. NOTE: the original seeded max with Float.MIN_VALUE,
	// which is the smallest POSITIVE float (~1.4e-45), so max was silently
	// wrong whenever every input value was negative.
	private float max = Float.NEGATIVE_INFINITY;
	private float min = Float.POSITIVE_INFINITY;

	// Input files tracked for per-file averages; sum[i]/num[i] belong to
	// file_paths[i]. Arrays are sized from file_paths instead of a magic 2.
	private String[] file_paths = {"hdfs://localhost:9000/user/root/input/nihao", "hdfs://localhost:9000/user/root/input/hello"};
	private long[] num = new long[file_paths.length];
	private double[] sum = new double[file_paths.length];

	// Full HDFS path of the split this task processes; constant for the task
	// lifetime, so it is resolved once in setup() rather than per record.
	private String splitPath;

	// True once at least one value has been parsed; suppresses bogus
	// sentinel max/min output from a mapper that received no data.
	private boolean sawValue = false;

	/**
	 * Caches the path of this task's input split so map() does not have to
	 * re-resolve it for every record.
	 */
	@Override
	protected void setup(Mapper<LongWritable, Text, Text, Text>.Context context)
			throws IOException, InterruptedException {
		InputSplit inputSplit = context.getInputSplit();
		splitPath = ((FileSplit) inputSplit).getPath().toString();
	}

	/**
	 * Parses one line as a float, updates the running max/min, and folds the
	 * value into the sum/count of the file it came from. Blank lines are
	 * ignored; non-numeric lines are skipped instead of failing the task
	 * (previously a single malformed line raised an uncaught
	 * NumberFormatException and killed the job).
	 */
	@Override
	protected void map(LongWritable key, Text value,
			Mapper<LongWritable, Text, Text, Text>.Context context)
			throws IOException, InterruptedException {
		// Text.toString() never returns null, so trim() alone is safe here.
		String line = value.toString().trim();
		if (line.isEmpty()) {
			return;
		}
		float f;
		try {
			f = Float.parseFloat(line);
		} catch (NumberFormatException ignored) {
			return; // best-effort: skip malformed records rather than abort
		}
		sawValue = true;
		if (f > max) {
			max = f;
		}
		if (f < min) {
			min = f;
		}
		for (int i = 0; i < file_paths.length; i++) {
			if (file_paths[i].equals(splitPath)) {
				sum[i] += f;
				num[i] += 1;
			}
		}
	}

	/**
	 * Emits this task's partial results after all records are consumed:
	 * "max"/"min" (only when at least one value was actually parsed — the
	 * original emitted the sentinel initial values even for empty input) and
	 * an "aver" record "sum:count" for the file this task processed.
	 */
	@Override
	protected void cleanup(Mapper<LongWritable, Text, Text, Text>.Context context)
			throws IOException, InterruptedException {
		if (sawValue) {
			context.write(new Text("max"), new Text(Float.toString(max)));
			context.write(new Text("min"), new Text(Float.toString(min)));
		}
		for (int i = 0; i < file_paths.length; i++) {
			if (file_paths[i].equals(splitPath)) {
				context.write(new Text("aver"), new Text(sum[i] + ":" + num[i]));
			}
		}
	}

}
