package com.atguigu.mapreduce.demo.writable;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * Mapper for the phone-traffic counter job.
 *
 * Input:  (byte offset, one tab-separated log line)
 * Output: (phone number, custom PhoneCounterPojo writable with up/down/total flow)
 *
 * @version V1.0
 * @author: guangcheng
 * @date: 2021/11/18 19:00
 */
public class PCounterMapper extends Mapper<LongWritable, Text, Text, PhoneCounterPojo> {

    // Reused across map() calls to avoid per-record allocation
    // (standard Hadoop mapper pattern; a mapper instance is single-threaded).
    private final Text outK = new Text();
    private final PhoneCounterPojo outV = new PhoneCounterPojo();

    /**
     * Parses one tab-separated record and emits (phone number, flow pojo).
     *
     * Sample input (fields in the middle may be empty, so the flow columns
     * are addressed relative to the END of the line):
     * <pre>
     * 21  13568436656  192.168.100.18  www.alibaba.com  2481  24681  200
     * 22  13568436656  192.168.100.19                   1116  954    200
     * </pre>
     *
     * @param key     byte offset of the line within the input split (unused)
     * @param value   one line of input text
     * @param context Hadoop context the (phone, pojo) pair is written to
     * @throws IOException          if writing to the context fails
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        String[] fields = line.split("\t");

        // Guard against blank/short lines: we read field 1 (phone) and the
        // three trailing columns; skip malformed records instead of letting an
        // ArrayIndexOutOfBoundsException kill the whole task attempt.
        if (fields.length < 4) {
            return;
        }

        // upFlow and downFlow sit just before the trailing status column;
        // counting from the end tolerates the optional host column.
        String up = fields[fields.length - 3];
        String down = fields[fields.length - 2];

        outK.set(fields[1]);
        outV.setUpFlow(Long.parseLong(up));
        outV.setDownFlow(Long.parseLong(down));
        // No-arg overload that derives the total from up + down flow.
        outV.setSumFlow();

        context.write(outK, outV);
    }
}

