package com.shujia.mr.kqzl2;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;

/**
 * Map-side tagging mapper for a reduce-side join of daily PM2.5 averages
 * with city dimension data. Each output value is prefixed with a one-char
 * tag so the reducer can tell the two record types apart by key (city id).
 *
 * Intended reducer input : &lt;"1001", ["$20180603-50", "#万寿福宫，北京"]&gt;
 * Intended reducer output: &lt;"20180603-万寿福宫，北京", "50"&gt;
 */
public class PM25JoinCityMapper extends Mapper<LongWritable, Text, Text, Text> {
    @Override
    protected void map(LongWritable key, Text value,
                       Mapper<LongWritable, Text, Text, Text>.Context context)
            throws IOException, InterruptedException {
        // Decide which input file this record came from by inspecting the split.
        FileSplit sourceSplit = (FileSplit) context.getInputSplit();
        String sourceFile = sourceSplit.getPath().getName();

        if (sourceFile.startsWith("city")) {
            // City dimension record, e.g. <0L, "1001,万寿福宫,北京">
            String[] fields = value.toString().split(",");
            // Key = city id; value tagged '#' = city info.
            // NOTE(review): emits fields[2]+fields[1] ("city"+"name") with no
            // separator — confirm the reducer expects this concatenation order.
            context.write(new Text(fields[0]), new Text("#" + fields[2] + fields[1]));
        } else if (sourceFile.startsWith("part")) {
            // Daily-average record (output of a prior job), e.g. <0L, "20180603-1001\t50">
            String[] fields = value.toString().split("\t");
            String[] dateAndCity = fields[0].split("-");
            // Key = city id; value tagged '&' = date followed by the average.
            // NOTE(review): the class comment above describes a '$' tag and a
            // '-' separator, but the code emits "&" + date + avg with no
            // separator — verify against the reducer before changing either.
            context.write(new Text(dateAndCity[1]), new Text("&" + dateAndCity[0] + fields[1]));
        }
        // Records from any other file name are intentionally ignored.
    }
}
