package com.atguigu.mapreduce.solution;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;

/**
 * Map side of a reduce-side join between order records and user records.
 *
 * <p>Input may come from two files:
 * <ul>
 *   <li>{@code order.csv}: lines of {@code orderId,userId,amount}</li>
 *   <li>any other file (user data): lines of {@code userId,email}</li>
 * </ul>
 *
 * <p>Every record is emitted keyed by user id; the reducer joins the two
 * record kinds. Fields that a given source does not provide are filled with
 * neutral defaults ("" / 0) so {@link OrderBean} is always fully populated.
 */
public class SolutionMapper extends Mapper<LongWritable, Text, Text, OrderBean> {
    /** Name of the file backing the current split; decides how lines are parsed. */
    private String fileName;
    // Reused across map() calls to avoid per-record allocation (standard
    // Hadoop idiom; context.write serializes immediately, so reuse is safe).
    private final Text userId = new Text();
    private final OrderBean order = new OrderBean();

    /**
     * Captures the source file name once per split so {@link #map} can tell
     * order records apart from user records.
     */
    @Override
    protected void setup(Mapper<LongWritable, Text, Text, OrderBean>.Context context) throws IOException, InterruptedException {
        FileSplit fs = (FileSplit) context.getInputSplit();
        fileName = fs.getPath().getName();
    }

    /**
     * Parses one CSV line and emits it keyed by user id.
     *
     * <p>Blank lines, lines with too few fields, and order lines whose amount
     * is not a valid integer (e.g. a header row) are skipped silently rather
     * than failing the whole task attempt.
     *
     * @param key     byte offset of the line within the file (unused)
     * @param value   one raw CSV line
     * @param context sink for the (userId, OrderBean) pair
     */
    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, OrderBean>.Context context) throws IOException, InterruptedException {
        String line = value.toString();
        if (line.isEmpty()) {
            return; // skip blank lines (common trailing newline artifact)
        }
        String[] fields = line.split(",");
        if ("order.csv".equals(fileName)) {
            // Order record: orderId,userId,amount
            if (fields.length < 3) {
                return; // malformed order line — skip instead of AIOOBE
            }
            final int amount;
            try {
                amount = Integer.parseInt(fields[2].trim());
            } catch (NumberFormatException ignored) {
                return; // non-numeric amount (e.g. CSV header) — skip record
            }
            userId.set(fields[1]);
            order.setOrderId(fields[0]);
            order.setUserId(fields[1]);
            order.setAmount(amount);
            order.setEmail("");
        } else {
            // User record: userId,email
            if (fields.length < 2) {
                return; // malformed user line — skip instead of AIOOBE
            }
            userId.set(fields[0]);
            order.setUserId(fields[0]);
            order.setEmail(fields[1]);
            order.setAmount(0);
            order.setOrderId("");
        }
        context.write(userId, order);
    }
}
