package com.lagou.mr.reduce_join;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;

/**
 * Map side of a reduce-side join between delivery-behavior records and
 * position records. Each input record is tagged with its origin dataset
 * ("deliver" or "position") and emitted keyed by positionId, so the reducer
 * receives both sides of the join grouped together.
 */
public class ReduceJoinMapper extends Mapper<LongWritable, Text, Text, DeliverBean> {

    // Name of the file this map task is reading; set once in setup() and used
    // in map() to decide which of the two joined datasets a record belongs to.
    private String fileName = "";

    // Reused output objects — standard Hadoop idiom to avoid allocating a new
    // key/value pair for every record (context.write serializes immediately).
    private final DeliverBean outValue = new DeliverBean();
    private final Text outKey = new Text();

    /**
     * Runs once when the map task starts: captures the input file's name from
     * the task's split.
     */
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        FileSplit fileSplit = (FileSplit) context.getInputSplit();
        fileName = fileSplit.getPath().getName();
    }

    /**
     * Parses one tab-separated line, fills the reusable bean according to the
     * source dataset, and emits (positionId, bean).
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String[] fields = value.toString().split("\t");

        if (fileName.startsWith("deliver_info")) {
            // Delivery-behavior record: userId \t positionId \t date
            outValue.setUserId(fields[0]);
            outValue.setPositionId(fields[1]);
            outValue.setPositionName("");
            outValue.setDate(fields[2]);
            outValue.setFlag("deliver");
        } else {
            // Position record: positionId \t positionName
            outValue.setUserId("");
            outValue.setPositionId(fields[0]);
            outValue.setPositionName(fields[1]);
            outValue.setDate("");
            outValue.setFlag("position");
        }

        outKey.set(outValue.getPositionId());
        context.write(outKey, outValue);
    }
}
