package com.hujf.mapreduce.join;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;

/**
 * Mapper side of a reduce-side join: tags each order/product record with its
 * source table and emits it keyed by product id (pid).
 *
 * @author Hujf
 * @title: JoinMapper
 * @date 2021/4/3 14:09
 */
public class JoinMapper extends Mapper<LongWritable, Text, Text, TableBean> {
    // Name of the file backing the current input split; set once in setup()
    // and used to tell order records apart from product records.
    private String fileName;
    // Reusable output objects — Hadoop serializes them on write(), so a single
    // instance can be safely reused across map() calls to avoid allocation churn.
    private final Text outk = new Text();
    private final TableBean outv = new TableBean();

    /**
     * Records which input file this split comes from so map() can decide
     * whether each line is an order record or a product record.
     *
     * @param context task context providing the current {@link InputSplit}
     */
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        FileSplit split = (FileSplit) context.getInputSplit();
        fileName = split.getPath().getName();
    }

    /**
     * Emits one (pid, TableBean) pair per input line for a reduce-side join.
     * <p>
     * Expected tab-separated layouts (inferred from the field assignments below —
     * confirm against the actual input files):
     * order file:   id \t pid \t amount
     * product file: pid \t pname
     *
     * @param key     byte offset of the line (unused)
     * @param value   one raw input line
     * @param context used to emit the join key and tagged bean
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        // Guard against blank lines (e.g. a trailing newline); without this,
        // indexing split[1] below would throw ArrayIndexOutOfBoundsException.
        if (line.isEmpty()) {
            return;
        }
        // Split once — both branches consume the same tab-separated fields.
        String[] fields = line.split("\t");
        if (fileName.contains("order")) {
            // Order record: join key is pid (column 1).
            outk.set(fields[1]);
            outv.setId(fields[0]);
            outv.setPid(fields[1]);
            outv.setAmount(Integer.parseInt(fields[2]));
            outv.setPname("");
            outv.setFlag("order");
        } else {
            // Product record: join key is pid (column 0); order-only fields
            // are cleared so the reused bean carries no stale state.
            outk.set(fields[0]);
            outv.setId("");
            outv.setPid(fields[0]);
            outv.setAmount(0);
            outv.setPname(fields[1]);
            outv.setFlag("product");
        }
        context.write(outk, outv);
    }
}
