package online.shenjian.hadoop.reducejoin;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * @author shenjian
 * @since 2023/2/25
 */
/**
 * Map side of a reduce-side join between an "order" file (id pid amount) and a
 * "product" file (pid pname). Emits records keyed by pid so the reducer can
 * join them; each DTO carries a flag ("order"/"product") telling the reducer
 * which side it came from.
 *
 * <p>Assumes fields are space-delimited — NOTE(review): confirm the input is
 * not tab-delimited.
 */
public class OrderProductMapper extends Mapper<LongWritable, Text, Text, OrderProductDto> {

    // Name of the file backing the current split; selects the record layout to parse.
    private String filename;
    // Output objects are reused across map() calls — Hadoop serializes them on
    // context.write(), so reuse avoids a per-record allocation.
    private Text keyOut = new Text();
    private OrderProductDto orderProductDto = new OrderProductDto();

    @Override
    protected void setup(Mapper<LongWritable, Text, Text, OrderProductDto>.Context context) throws IOException, InterruptedException {
        // A split belongs to exactly one file, so resolving the filename once in
        // setup() is sufficient for every map() call of this task.
        FileSplit fileSplit = (FileSplit) context.getInputSplit();
        this.filename = fileSplit.getPath().getName();
    }


    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, OrderProductDto>.Context context) throws IOException, InterruptedException {
        String line = value.toString();
        if (line.isEmpty()) {
            // Skip blank lines instead of failing on split[] indexing below.
            return;
        }
        String[] split = line.split(" ");
        if (filename.contains("order") && split.length >= 3) {
            // Order record: id pid amount. pid is the join key.
            String id = split[0];
            String pid = split[1];
            String amount = split[2];
            keyOut.set(pid);
            orderProductDto.setId(id);
            orderProductDto.setPid(pid);
            orderProductDto.setAmount(Integer.parseInt(amount));
            // The DTO is reused across calls, so explicitly clear the field this
            // record type does not carry; otherwise it would hold a stale value
            // (or a null default, which breaks Writable serialization).
            orderProductDto.setProductName("");
            orderProductDto.setFlag("order");
        } else if (filename.contains("product") && split.length >= 2) {
            // Product record: pid pname. pid is the join key.
            String pid = split[0];
            String productName = split[1];
            keyOut.set(pid);
            orderProductDto.setPid(pid);
            orderProductDto.setProductName(productName);
            // Clear order-only fields for the same DTO-reuse reason as above.
            orderProductDto.setId("");
            orderProductDto.setAmount(0);
            orderProductDto.setFlag("product");
        } else {
            // Unrecognized file or malformed line: emit nothing rather than
            // writing whatever stale/default state the reused DTO holds.
            return;
        }
        context.write(keyOut, orderProductDto);
    }
}
