package qdu.edu.com.fushanf4.service.count.usertype;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import qdu.edu.com.fushanf4.utils.UtilConstants;

import java.io.IOException;

public class ClassificationOfProductDescriptionsByUserTypeMapper extends Mapper<LongWritable, Text, Text, Text> {
    private final static String SEMICOLON = ";";
    private final static String COLON = ":";
    private final Text valueOut = new Text();
    private final Text keyOut = new Text();

    /**
     * Maps one tab-separated input line to (dominant user type, product description id).
     *
     * <p>Expected line format: {@code <description>\t<type1:degree1;type2:degree2;...>}.
     * The user type with the highest relevance degree becomes the output key; the
     * first tab-separated field becomes the output value. Malformed records (missing
     * fields, missing colon, or a non-numeric degree) are silently skipped, which is
     * the conventional way to handle dirty input in a MapReduce job.
     *
     * @param key     byte offset of the line within the split (unused)
     * @param value   the raw input line
     * @param context Hadoop context used to emit the (userType, description) pair
     * @throws IOException          propagated from {@link Context#write}
     * @throws InterruptedException propagated from {@link Context#write}
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();

        String[] split = line.split(UtilConstants.RegexOfString.HORIZONTAL_TABULATION); // tab-separated fields

        if (split.length <= 1) {    // malformed record: no user-type field, would index out of bounds
            return;
        }
        // First field (the description) becomes the output value.
        valueOut.set(split[0]);
        // Second field holds "type:degree" pairs separated by semicolons.
        String[] users = split[1].split(SEMICOLON);

        String maxUser = null;
        // BUG FIX: the max must be tracked as a double. The previous code stored
        // the max as an int via (int)Double.parseDouble(...), truncating the
        // fraction — e.g. after seeing degree 1.5 (stored as 1), a later degree
        // of 1.4 compared as 1.4 > 1 and wrongly replaced the true maximum.
        double maxDegree = Double.NEGATIVE_INFINITY;
        for (String user : users) {
            // Split each pair into user type and relevance degree.
            String[] userNameAndDegree = user.split(COLON);
            if (userNameAndDegree.length <= 1) {    // malformed pair: drop the whole record (original behavior)
                return;
            }
            double degree;
            try {
                degree = Double.parseDouble(userNameAndDegree[1]);
            } catch (NumberFormatException e) {
                return; // non-numeric degree: treat the record as malformed, same as the guards above
            }
            if (degree > maxDegree) {
                maxDegree = degree;
                maxUser = userNameAndDegree[0];
            }
        }

        // ROBUSTNESS: the original `assert maxUser != null` is a no-op unless the
        // JVM runs with -ea; an explicit check guarantees we never emit a null key.
        if (maxUser == null) {
            return;
        }
        keyOut.set(maxUser);

        // Emit (dominant user type, description).
        context.write(keyOut, valueOut);
    }
}
