package com.shujia.mr.hw.filter2;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

public class MyFilter2Reducer extends Reducer<Text, Text, NullWritable, Student> {

    /** Minimum total score a student must exceed to be written to the output. */
    private static final int SCORE_THRESHOLD = 450;

    /**
     * Aggregates all records for one student ID and emits the student only if
     * the total score exceeds {@link #SCORE_THRESHOLD}.
     *
     * <p>Two record shapes arrive under the same key:
     * <ul>
     *   <li>a single-field score record, e.g. {@code "98"} — summed into the total;</li>
     *   <li>a 4-field info record {@code "name,age,gender,clazz"} — copied into
     *       the {@link Student} fields.</li>
     * </ul>
     *
     * @param key     the student ID
     * @param values  mixed score and info records for this student
     * @param context framework context used to emit the result
     * @throws IOException          propagated from {@code context.write}
     * @throws InterruptedException propagated from {@code context.write}
     */
    @Override
    protected void reduce(Text key, Iterable<Text> values,
                          Reducer<Text, Text, NullWritable, Student>.Context context)
            throws IOException, InterruptedException {

        int totalScore = 0;
        Student student = new Student();
        student.setStudentID(key.toString());

        for (Text value : values) {
            String[] split = value.toString().split(",");
            if (split.length == 1) {
                // Score record: a single numeric field. trim() tolerates stray
                // whitespace that would otherwise make parseInt fail the task.
                totalScore += Integer.parseInt(split[0].trim());
            } else {
                // Info record: name,age,gender,clazz — assumes exactly 4 fields;
                // NOTE(review): a malformed record with 2-3 fields would throw
                // ArrayIndexOutOfBoundsException here, same as before.
                student.setName(split[0]);
                student.setAge(split[1]);
                student.setGender(split[2]);
                student.setClazz(split[3]);
            }
        }
        student.setScore(Integer.toString(totalScore));

        if (totalScore > SCORE_THRESHOLD) {
            context.write(NullWritable.get(), student);
        }
    }
}
