package org.com.blbl.partitionCount;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

public class PartitionCountMapper extends Mapper<LongWritable, Text, Text, LongWritable> {

    // Reuse Writable instances across map() calls — map() is invoked once per
    // input record (potentially millions of times), so per-call allocation is
    // the classic Hadoop Mapper anti-pattern. Text.set() overwrites the buffer.
    private final Text keyOut = new Text();
    private final LongWritable valueOut = new LongWritable(1);

    /**
     * Emits (partitionValue, 1) for each input line, where the partition value
     * is the 7th comma-separated field counted from the end of the line.
     * Lines with fewer than 7 fields, or an empty partition field, are skipped.
     *
     * @param key     byte offset of the line within the input split (unused)
     * @param value   one line of comma-separated input text
     * @param context Hadoop context used to emit output key/value pairs
     * @throws IOException          if the framework write fails
     * @throws InterruptedException if the task is interrupted during write
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // Split the line on the comma delimiter.
        // NOTE(review): String.split(",") discards trailing empty fields,
        // which shifts "length - 7" for rows ending in empty columns — confirm
        // the data format never has trailing empty fields.
        String[] fields = value.toString().split(",");

        // Guard against short/malformed rows: the unguarded
        // fields[fields.length - 7] would throw ArrayIndexOutOfBoundsException
        // and fail the whole task on a single bad line.
        if (fields.length < 7) {
            return;
        }

        String partitionColumn = fields[fields.length - 7];
        if (!partitionColumn.isEmpty()) { // skip records with an empty partition value
            keyOut.set(partitionColumn);
            context.write(keyOut, valueOut); // emit (partitionValue, 1)
        }
    }

}
