package com.yjxxt.studentInfo;

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Reducer that parses tab-separated student grade records and emits one
 * {@link StudentInfo} per valid record (with a {@link NullWritable} value).
 *
 * <p>Expected value format (5 tab-separated fields):
 * video grade, homework grade, exam grade, total grade, gender.
 * Records with the wrong field count or non-numeric grades are treated as
 * dirty data and skipped.
 */
public class StudentInfoReducer extends Reducer<Text, Text, StudentInfo, NullWritable> {

    @Override
    protected void reduce(Text key, Iterable<Text> values, Reducer<Text, Text, StudentInfo, NullWritable>.Context context) throws IOException, InterruptedException {
        for (Text value : values) {
            // Split the record into its fields.
            // (String.split never returns null, so no null check is needed.)
            String[] split = value.toString().split("\t");

            // Dirty-data filter #1: wrong number of fields.
            if (split.length != 5) {
                continue;
            }

            try {
                // Fields: video grade, homework grade, exam grade, total grade, gender.
                double videoGrade = Double.parseDouble(split[0]);
                double jobGrade = Double.parseDouble(split[1]);
                double examGrade = Double.parseDouble(split[2]);
                double totalGrade = Double.parseDouble(split[3]);
                String gender = split[4];

                // NOTE(review): the id is hard-coded to "0" exactly as in the original.
                // Presumably the reduce key carries the student id — confirm whether
                // key.toString() was intended here.
                StudentInfo studentInfo = new StudentInfo("0", gender, videoGrade, jobGrade, examGrade, totalGrade);
                context.write(studentInfo, NullWritable.get());
            } catch (NumberFormatException ignored) {
                // Dirty-data filter #2: a grade field is not a valid number.
                // Previously this exception would crash the reduce task; skipping the
                // record matches the stated intent of filtering dirty data.
            }
        }
    }
}
