package org.example;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

import java.io.IOException;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;

public class Mapper extends org.apache.hadoop.mapreduce.Mapper<LongWritable, Text, Text, LongWritable> {
    // Date format of the CSV's date column (field index 2).
    // DateTimeFormatter is immutable and thread-safe, so it is cached as a constant
    // instead of being rebuilt for every record.
    private static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd");

    // Minimum number of comma-separated fields a usable data row must have.
    private static final int MIN_FIELDS = 21;

    // Reused output objects to avoid per-record allocation (standard Hadoop idiom).
    private final Text outputKey = new Text();
    private final LongWritable outputValue = new LongWritable();

    /**
     * Emits one ({@code "<code>\t<year>Q<quarter>"}, volume) pair per valid CSV record.
     *
     * <p>Input rows are comma-separated; field 0 is the code, field 2 the date
     * ({@code yyyy-MM-dd}) and field 8 the volume. Rows that are too short or
     * fail to parse are skipped (best-effort, matching the original behavior).
     *
     * @param key     byte offset of this line within the input file (TextInputFormat)
     * @param value   one raw CSV line
     * @param context Hadoop context used to emit the output pair
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // Skip the header: it is the line at byte offset 0 of the input file.
        // The previous per-mapper boolean flag wrongly dropped the FIRST record of
        // every non-first input split, since only the first split contains the header.
        if (key.get() == 0) {
            return;
        }

        String[] fields = value.toString().split(",");
        if (fields.length < MIN_FIELDS) {
            return; // malformed / short row
        }

        String code = fields[0].trim();
        String dateStr = fields[2].trim();
        String volumeStr = fields[8].trim();

        try {
            LocalDate date = LocalDate.parse(dateStr, DATE_FORMAT);
            // Quarter = ceil(month / 3): months 1-3 -> Q1, ..., 10-12 -> Q4.
            // The previous formula (month / 4 + 1) misplaced July (Q2 instead of Q3)
            // and November (Q3 instead of Q4).
            int quarter = (date.getMonthValue() + 2) / 3;
            String yearQuarter = date.getYear() + "Q" + quarter;

            outputKey.set(code + "\t" + yearQuarter);
            outputValue.set(Long.parseLong(volumeStr));
            context.write(outputKey, outputValue);
        } catch (DateTimeParseException | NumberFormatException ignored) {
            // Best-effort parsing: skip unparseable rows rather than failing the task.
        }
    }
}
