package com.example.hadoopdemo.executor.recommend.movie;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Counts the item-combination lists to build the item co-occurrence matrix.
 *
 * @author Ruison
 * @date 2021/12/8
 */
public class Step2 {

    /**
     * Step 1 - parse data: for each user's rating line, emit every ordered pair
     * of item ids that co-occur on that line (including the diagonal
     * {@code itemA:itemA}) with a count of 1, so the reducer can sum them into
     * the co-occurrence matrix.
     *
     * <p>Input value format per line: the userId token first, followed by
     * {@code itemId:score} tokens, all separated by {@code Recommend.DELIMITER};
     * index 0 (the userId) is skipped.
     */
    public static class ParseDataStep1 extends Mapper<LongWritable, Text, Text, IntWritable> {
        // Reusable output writables. Instance fields (not static) so mapper
        // instances running concurrently in the same JVM cannot clobber each
        // other's output key.
        private final Text k = new Text();
        private final IntWritable v = new IntWritable(1);

        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, IntWritable>.Context context) throws IOException, InterruptedException {
            String[] values = Recommend.DELIMITER.split(value.toString());
            if (values.length < 2) {
                // Line holds at most a userId — no item pairs to emit.
                return;
            }
            // Extract each item id exactly once up front instead of re-splitting
            // "itemId:score" inside the O(n^2) pair loop below.
            String[] itemIds = new String[values.length - 1];
            for (int i = 1; i < values.length; i++) {
                itemIds[i - 1] = values[i].split(":")[0];
            }
            // Emit every ordered pair "itemId1:itemId2" with count 1.
            for (String itemId1 : itemIds) {
                for (String itemId2 : itemIds) {
                    k.set(itemId1 + ":" + itemId2);
                    context.write(k, v);
                }
            }
        }
    }

    /**
     * Step 2 - aggregate by item pair: sums the 1-counts emitted by the mapper
     * for each {@code itemId1:itemId2} key, producing one co-occurrence count
     * per item pair.
     */
    public static class CombinationStatisticsStep2 extends Reducer<Text, IntWritable, Text, IntWritable> {
        // Reused output value to avoid allocating a new IntWritable per key.
        private final IntWritable result = new IntWritable();

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Reducer<Text, IntWritable, Text, IntWritable>.Context context) throws IOException, InterruptedException {
            // Total number of times this item pair co-occurred across all users.
            int total = 0;
            for (IntWritable count : values) {
                total += count.get();
            }
            result.set(total);
            // Output: "itemId1:itemId2" -> co-occurrence count.
            context.write(key, result);
        }
    }
}
