package com.example.hadoopdemo.executor.recommend.movie;

import java.io.IOException;
import java.util.StringJoiner;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * 按用户分组，计算所有物品出现的组合列表，得到用户对物品的评分矩阵
 *
 * @author Ruison
 * @date 2021/12/7
 */
public class Step1 {

    /**
     * 步骤1 - 解析数据
     */
    public static class ParseDataStep1 extends Mapper<Object, Text, IntWritable, Text> {
        private final static IntWritable k = new IntWritable();
        private final static Text v = new Text();

        @Override
        protected void map(Object key, Text value, Mapper<Object, Text, IntWritable, Text>.Context context) throws IOException, InterruptedException {
            String[] values = Recommend.DELIMITER.split(value.toString());
            int userId = Integer.parseInt(values[0]);
            String itemId = values[1];
            String preferenceScore = values[2];
            k.set(userId);
            v.set(itemId + ":" + preferenceScore);
            context.write(k, v);
        }
    }

    /**
     * 步骤2 - 按用户进行分组
     */
    public static class GroupByUserStep2 extends Reducer<IntWritable, Text, IntWritable, Text> {
        private final Text v = new Text();

        @Override
        protected void reduce(IntWritable key, Iterable<Text> values, Reducer<IntWritable, Text, IntWritable, Text>.Context context) throws IOException, InterruptedException {
            StringBuilder sb = new StringBuilder();
            for (Text value : values) {
                sb.append(",").append(value.toString());
            }
            v.set(sb.toString().replaceFirst(",", ""));
            context.write(key, v);
        }
    }

}
