package com.shujia.mapreduce.folow_count_demo01;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;


public class FlowCountMapper extends Mapper<LongWritable, Text, Text, FlowBean> {
    /*
     * Map stage: converts (K1, V1) into (K2, V2).
     *
     * K1 = byte offset of the line (unused)
     * V1 = one tab-separated input line; the fields read here are:
     *        [1] phone number  [2] upFlow  [3] downFlow
     *        [4] upCountFlow   [5] downCountFlow
     *
     * K2 = phone number (Text)
     * V2 = FlowBean carrying the four parsed flow values
     */

    // Reused output writables: map() is invoked once per record, so allocating
    // a new Text/FlowBean each call creates needless GC pressure. context.write
    // serializes immediately, making reuse safe.
    private final Text outKey = new Text();
    private final FlowBean outValue = new FlowBean();

    /**
     * Parses one input line and emits (phoneNumber, FlowBean).
     * Malformed lines (too few fields, or non-numeric flow values) are
     * skipped rather than failing the whole task.
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String[] fields = value.toString().split("\t");
        // Guard against short records — the original indexed up to fields[5]
        // and would throw ArrayIndexOutOfBoundsException on a truncated line.
        if (fields.length < 6) {
            return;
        }
        try {
            outValue.setUpFlow(Integer.parseInt(fields[2]));
            outValue.setDownFlow(Integer.parseInt(fields[3]));
            outValue.setUpCountFlow(Integer.parseInt(fields[4]));
            outValue.setDownCountFlow(Integer.parseInt(fields[5]));
        } catch (NumberFormatException ignored) {
            // Non-numeric flow field: skip this record instead of killing the job.
            return;
        }
        outKey.set(fields[1]);
        context.write(outKey, outValue);
    }
}
