package com.briup.prs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

/**
 * @author adam
 * @date 2022/2/8
 */
/**
 * MapReduce step 6 of the recommendation pipeline: joins, per item, its
 * co-occurrence matrix row with its user-preference matrix row and emits
 * partial recommendation scores keyed by "user:item".
 */
public class Step6 extends Configured implements Tool {
    /**
     * Identity mapper. Input key: item A; input value: either a
     * co-occurrence row ("item:count,item:count,...") or a preference row
     * ("user-score,user-score,..."). Both rows for the same item meet at
     * the reducer.
     */
    public static class S6Mapper extends Mapper<Text, Text, Text, Text> {
        @Override
        protected void map(Text key, Text value, Context context) throws IOException, InterruptedException {
            // Pass the record through unchanged.
            context.write(key, value);
        }
    }

    /**
     * For one item, collects its preference row and co-occurrence row, then
     * emits the cross product: key "user:itemB", value preference * count.
     */
    public static class S6Reducer extends Reducer<Text, Text, Text, Text> {
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            // user -> preference score (rows whose elements use '-')
            HashMap<String, Double> preferences = new HashMap<>();
            // item -> co-occurrence count (rows whose elements use ':')
            HashMap<String, Double> cooccurrences = new HashMap<>();
            for (Text value : values) {
                String row = value.toString();
                // Split the row into its individual elements.
                String[] elements = row.split(",");
                // NOTE(review): rows are classified by their delimiter. This
                // breaks if ids or scores ever legitimately contain '-'
                // (e.g. negative preference values) — confirm the upstream
                // data format guarantees this.
                if (row.contains("-")) {
                    // Preference row: each element is "user-score".
                    for (String element : elements) {
                        String[] parts = element.split("-");
                        preferences.put(parts[0], Double.parseDouble(parts[1]));
                    }
                } else {
                    // Co-occurrence row: each element is "item:count".
                    for (String element : elements) {
                        String[] parts = element.split(":");
                        cooccurrences.put(parts[0], Double.parseDouble(parts[1]));
                    }
                }
            }
            // Cross product of the two rows. Plain loops (not Map.forEach)
            // so the checked IOException/InterruptedException declared by
            // reduce() propagate and fail the task instead of being
            // swallowed inside a lambda's catch block.
            for (Map.Entry<String, Double> pref : preferences.entrySet()) {
                for (Map.Entry<String, Double> cooc : cooccurrences.entrySet()) {
                    Text oKey = new Text(pref.getKey() + ":" + cooc.getKey());
                    // Partial recommendation score for this (user, item) pair.
                    Text oValue = new Text(String.valueOf(pref.getValue() * cooc.getValue()));
                    context.write(oKey, oValue);
                }
            }
        }
    }


    /**
     * Configures and runs the join job. Reads the two input paths from the
     * "in1"/"in2" configuration keys and the output path from "out".
     *
     * @return 0 if the job succeeded, 1 otherwise
     */
    @Override
    public int run(String[] strings) throws Exception {

        Configuration conf = getConf();
        String in1 = conf.get("in1");
        String in2 = conf.get("in2");
        String out = conf.get("out");
        // Key/value split character for KeyValueTextInputFormat
        // ('\t' is the format's default; set explicitly for clarity).
        conf.set(KeyValueLineRecordReader.KEY_VALUE_SEPARATOR, "\t");
        Job job = Job.getInstance(conf, "step6");
        job.setJarByClass(Step6.class);

        job.setMapperClass(S6Mapper.class);
        job.setInputFormatClass(KeyValueTextInputFormat.class);
        KeyValueTextInputFormat.setInputPaths(job,
                new Path(in1),
                new Path(in2));

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        job.setReducerClass(S6Reducer.class);
        job.setOutputKeyClass(Text.class);
        // BUG FIX: S6Reducer emits Text values. Declaring DoubleWritable
        // here made the framework reject every reducer write at runtime
        // ("wrong value class").
        job.setOutputValueClass(Text.class);

        job.setOutputFormatClass(TextOutputFormat.class);
        TextOutputFormat.setOutputPath(job,
                new Path(out));
        // Propagate job success/failure instead of always returning 0.
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        // Exit with the job's status code so calling scripts detect failure.
        System.exit(ToolRunner.run(new Step6(), args));
    }
}
