package com.factors.DataClean;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;


/**
 * Mapper that cleans performance CSV records:
 * <ul>
 *   <li>skips the header line (the record whose byte offset within its split is 0);</li>
 *   <li>drops any record containing at least one blank field (counting trailing
 *       empty fields), incrementing the {@code DeleteCounter:deleted} counter;</li>
 *   <li>emits every remaining record unchanged with a {@link NullWritable} value.</li>
 * </ul>
 */
public class PerformanceMapper extends Mapper<LongWritable, Text, Text, NullWritable> {

    // Logger for this mapper.
    private static final Logger LOGGER = LoggerFactory.getLogger(PerformanceMapper.class);

    /**
     * Filters one input record.
     *
     * @param key     byte offset of the record within its input split; 0 identifies the header
     * @param value   the raw CSV line
     * @param context task context used for the delete counter and output
     * @throws IOException          if the framework write fails
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // Skip the header: the first record of a split always has offset 0.
        // FIX: the previous static boolean "isFirstLineSkipped" flag was removed.
        // Static mutable state is unsafe here — when a single mapper task reads
        // multiple files (e.g. CombineFileInputFormat) or when JVMs are reused,
        // offsets reset to 0 per file but the flag would only ever allow one
        // header to be skipped, letting later headers through as data.
        if (key.get() == 0) {
            LOGGER.info("跳过第一行：{}", value.toString());
            return;
        }

        String line = value.toString();
        // limit -1 keeps trailing empty fields so "a,b," still yields a blank field
        String[] fields = line.split(",", -1);

        // Drop the record as soon as any blank (null/empty/whitespace-only) field
        // is found — equivalent to the old "count >= 1" check, without scanning
        // the remaining fields.
        for (String field : fields) {
            if (StringUtils.isBlank(field)) {
                context.getCounter("DeleteCounter", "deleted").increment(1L);
                LOGGER.warn("删除1行：{}", line);
                return;
            }
        }

        // Emit the original Text as-is to avoid an unnecessary copy.
        context.write(value, NullWritable.get());
    }
}