package com.lxl.testHd.etl.one;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * @author ：e_lixilin
 * @date ：2022/2/21 14:33
 * @description：
 * @modified By：
 */
/**
 * First-stage ETL mapper: filters out dirty records.
 *
 * <p>A record is kept (written through unchanged, with a {@code NullWritable} value)
 * only when it is non-blank and splits into exactly {@link #FIELD_LENGTH}
 * tab-separated fields. Kept records increment the {@code NOMAL} counter;
 * everything else increments {@code ERROR} and is dropped.
 */
public class MyEtlOneMapper extends Mapper<LongWritable, Text, Text, NullWritable> {

    /** Shared sentinel value — NullWritable is a singleton, safe to reuse. */
    private final NullWritable outValue = NullWritable.get();

    // NOTE(review): "NOMAL" is a typo for "NORMAL", but counter names are visible
    // in job output and may be read by driver/monitoring code, so it is kept as-is.
    enum MyCount {NOMAL, ERROR}

    /** Expected number of tab-separated fields in a clean record. */
    public static final int FIELD_LENGTH = 5;

    /**
     * Emits the input line unchanged when it is a well-formed record, otherwise
     * counts it as an error and drops it.
     *
     * @param key     byte offset of the line in the input split (unused)
     * @param value   the raw input line
     * @param context job context used for output and counters
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        // NOTE(review): split("\t") drops trailing empty fields, so a record whose
        // last field is empty is rejected as dirty — confirm this is intended.
        if (StringUtils.isNotBlank(line) && line.split("\t").length == FIELD_LENGTH) {
            context.getCounter(MyCount.NOMAL).increment(1);
            context.write(value, outValue);
        } else {
            // Blank lines and records with the wrong field count are both dirty data.
            context.getCounter(MyCount.ERROR).increment(1);
        }
    }
}
