package 全国粗离婚率;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.junit.Test;

import java.io.IOException;


public class DivorceRateMapper extends Mapper<LongWritable, Text, Text, DivorceRateBean> {
    // Reused across map() calls to avoid per-record object allocation (standard Hadoop idiom).
    private Text outKey = new Text();
    private DivorceRateBean outValue = new DivorceRateBean();

    /**
     * Parses one CSV line of divorce-rate data and emits (year, DivorceRateBean).
     *
     * Expected record layout — 10 comma-separated fields, e.g.:
     *   北京市,东城区,2022年,0.53,0.52,0.62,0.47,0.01,0.25,148,
     * fields[2] = year, fields[8] = divorce count, fields[9] = population count.
     * (The trailing comma yields no extra field: String.split(",") discards
     * trailing empty strings.)
     *
     * Malformed records — wrong field count or non-numeric values (e.g. a CSV
     * header line) — are skipped instead of failing the entire job.
     *
     * @param key     byte offset of the line in the input split (unused)
     * @param value   one line of CSV input
     * @param context Hadoop context used to emit the (year, bean) pair
     */
    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, DivorceRateBean>.Context context) throws IOException, InterruptedException {
        String[] fields = value.toString().split(",");
        if (fields.length != 10) {
            return; // record has the wrong shape — skip it
        }
        try {
            String year = fields[2];
            float divorceCount = Float.parseFloat(fields[8]);
            long populationCount = Long.parseLong(fields[9]);

            // Key on the year string so the reducer aggregates per year.
            // (Note: the key is Text, not an integer — years sort lexically.)
            outKey.set(year);
            outValue.set(divorceCount, populationCount);

            // Emit the key/value pair to the shuffle for the Reducer.
            context.write(outKey, outValue);
        } catch (NumberFormatException ignored) {
            // Non-numeric field (e.g. a header row) — skip this record rather
            // than crashing the whole map task on one bad line.
        }
    }
}