package com.zhanghe.study.mapreduce.join;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;

/**
 * @author zh
 * @date 2021/3/30 11:32
 */
public class JoinMapper extends Mapper<LongWritable,Text, Text,TableBean> {
    /** Name of the input file backing this split; decides which table a record belongs to. */
    String name;
    // Reused across map() calls to avoid per-record allocation (standard MR pattern);
    // because of this, every field must be reset on each call.
    TableBean bean = new TableBean();
    Text k = new Text();

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        InputSplit inputSplit = context.getInputSplit();
        if(inputSplit instanceof FileSplit){
            // Resolve the source file name once per split.
            name = ((FileSplit) inputSplit).getPath().getName();
        }
    }

    /**
     * Tags each record with its source table ("order" or "product") and emits it
     * keyed by pid so the reducer can join the two tables on product id.
     *
     * @param key     byte offset of the line in the input file (unused)
     * @param value   one tab-separated record from either the order or product file
     * @param context MR context used to emit the (pid, bean) pair
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // Split the record into its tab-separated fields.
        String[] str = value.toString().split("\t");
        // Order table: id \t pid \t amount
        if(StringUtils.isNotBlank(name) && StringUtils.startsWith(name,"order")){
            bean.setId(str[0]);
            bean.setPid(str[1]);
            bean.setAmount(Integer.parseInt(str[2]));
            bean.setFlag("order");
            bean.setName("");
            k.set(str[1]);
        }
        // Product table: pid \t name
        else if(StringUtils.isNotBlank(name) && StringUtils.startsWith(name,"product")){
            bean.setId("");
            bean.setPid(str[0]);
            bean.setName(str[1]);
            // Fix: bean is reused across calls, so amount must be reset here or a
            // product record inherits the amount of the previously mapped order record.
            bean.setAmount(0);
            bean.setFlag("product");
            k.set(str[0]);
        }
        else {
            // Fix: previously a record from an unrecognized file was still written
            // with the stale key/bean of the prior record; skip such records instead.
            return;
        }
        context.write(k,bean);
    }
}
