package com.etc;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;

/**
 * Mapper that tags user records from two differently-shaped input files with a
 * common {@link User} value, keyed by user id, so a downstream reducer can join them.
 *
 * <p>Record layouts (comma-separated):
 * <ul>
 *   <li>{@code .csv}: userId, name, age, star — emitted with logo "1"</li>
 *   <li>{@code .bat}: coding, userId — emitted with logo "2"</li>
 * </ul>
 *
 * <p>Malformed lines (too few fields, non-numeric age) are skipped rather than
 * allowed to fail the task with an uncaught runtime exception.
 */
public class UserMap extends Mapper<LongWritable, Text, Text, User> {

    /**
     * Maps one input line to a (userId, User) pair, dispatching on the name of
     * the file the current split belongs to.
     *
     * @param key     byte offset of the line within the file (unused)
     * @param value   the raw comma-separated line
     * @param context Hadoop task context used to emit output and query the split
     * @throws IOException          if emitting the output record fails
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // The record layout depends on which input file this split came from.
        FileSplit split = (FileSplit) context.getInputSplit();
        String fileName = split.getPath().getName();

        // Split the raw text line on commas.
        String[] fields = value.toString().split(",");

        if (fileName.endsWith(".csv")) {
            // .csv layout: userId, name, age, star (no coding column).
            if (fields.length < 4) {
                return; // skip malformed record instead of throwing AIOOBE
            }
            int age;
            try {
                age = Integer.parseInt(fields[2]);
            } catch (NumberFormatException ignored) {
                return; // skip records with a non-numeric age field
            }
            // Logo "1" tags the record as coming from the .csv side of the join.
            context.write(new Text(fields[0]),
                    new User("null", fields[0], fields[1], age, fields[3], "1"));
        } else if (fileName.endsWith(".bat")) {
            // .bat layout: coding, userId; remaining User fields get placeholders.
            if (fields.length < 2) {
                return; // skip malformed record instead of throwing AIOOBE
            }
            // Logo "2" tags the record as coming from the .bat side of the join.
            context.write(new Text(fields[1]),
                    new User(fields[0], fields[1], "null", 0, "null", "2"));
        }
        // Lines from files with any other extension are dropped, as in the
        // original implementation.
    }
}
