package com.rk.recommand;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;

public class UserCF_Step4 {
    /**
     * Tags each input record by its source dataset and keys it by item id so the
     * reducer can join them.
     * <p>
     * Records from the "step3" directory (user-similarity output) are broadcast to
     * every item id with an "A:" prefix; records from the "data" directory (raw
     * ratings) are emitted once under their own item id with a "B:" prefix.
     */
    public static class Step4Mapper extends Mapper<LongWritable, Text, Text, Text>{
        private String flag;// A:step3 or B:data — parent directory name of the split
        private int itemNum = 100; // 7 — number of items to broadcast similarity rows to
        // Hard-coded id offset: item ids are assumed to be itemIndex+1 .. itemIndex+itemNum
        // (i.e. 101..200). NOTE(review): confirm this matches the actual item id range.
        private int itemIndex = 100;

        // Reused output objects — Hadoop serializes the key/value during write(),
        // so one mutable pair avoids allocating 2 Text objects per emitted record.
        private final Text outKey = new Text();
        private final Text outValue = new Text();

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            FileSplit split = (FileSplit) context.getInputSplit();
            // The parent directory name tells us which dataset this split belongs to.
            flag = split.getPath().getParent().getName();
            System.out.println("flag: " + flag);
        }

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            String[] words = CommonConst.DELIMITER.split(line);

            if (flag.equals("step3")) {
                // Build the similarity record once instead of re-concatenating it
                // on each of the itemNum loop iterations.
                // Layout: A:userId,similarUser1,similarUser2 — assumes step3 lines
                // have at least 4 fields (words[0], words[1], words[3]).
                String simRecord = "A:" + words[0] + "," + words[1] + "," + words[3];
                for (int i = 1; i <= itemNum; i++)
                {
                    outKey.set(Integer.toString(itemIndex + i));//itemid
                    outValue.set(simRecord);
                    context.write(outKey, outValue);
                }
            } else if (flag.equals("data")) {
                outKey.set(words[1]);//itemid
                outValue.set("B:" + words[0] + "," + words[2]);//userid + score
                context.write(outKey, outValue);
            }
        }
    }

    /**
     * Joins, per item id, the user-similarity records ("A:") with the rating
     * records ("B:") and emits a predicted score for each similar user who has
     * not already rated this item.
     * <p>
     * Output: key = userId, value = "itemId,score" with two decimal places.
     */
    public static class Step4Reducer extends Reducer<Text, Text, Text, Text>{
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            // userId -> "simUser1,simUser2" (from the step3 similarity output)
            Map<String, String> mapA = new HashMap<String, String>();
            // userId -> score (from the raw rating data)
            Map<String, String> mapB = new HashMap<String, String>();

            for (Text line : values) {
                String val = line.toString();

                if (val.startsWith("A:")) {
                    String[] kv = CommonConst.DELIMITER.split(val.substring(2));
                    mapA.put(kv[0], kv[1] + "," + kv[2]);
                } else if (val.startsWith("B:")) {
                    String[] kv = CommonConst.DELIMITER.split(val.substring(2));
                    mapB.put(kv[0], kv[1]);
                }
            }

            // entrySet() iteration avoids a second mapA lookup per user.
            for (Map.Entry<String, String> entry : mapA.entrySet()) {
                String userId = entry.getKey();
                // A user who already rated this item gets no recommendation for it.
                if (mapB.containsKey(userId)) {
                    continue;
                }
                String[] simi = CommonConst.DELIMITER.split(entry.getValue());
                if (simi.length >= 2) {
                    // Predicted score = mean of the two most similar users' scores;
                    // a similar user who did not rate this item contributes 0.
                    double simiVal1 = parseScore(mapB.get(simi[0]));
                    double simiVal2 = parseScore(mapB.get(simi[1]));
                    double score = (simiVal1 + simiVal2) / 2;

                    // Locale.ROOT pins the decimal separator to '.' regardless of
                    // the JVM's default locale, keeping the output format stable.
                    context.write(new Text(userId), new Text(key.toString() + "," + String.format(Locale.ROOT, "%.2f", score)));
                }
            }

        }

        /** Parses a score string, treating a missing entry ({@code null}) as 0. */
        private static double parseScore(String s) {
            return s == null ? 0 : Double.parseDouble(s);
        }
    }

    /**
     * Configures and submits the step-4 join job: reads the step-3 similarity
     * output and the raw rating data, writes recommendation scores to the
     * {@code output_step4} path.
     *
     * @param path lookup table holding "input1_step4", "input2_step4" and
     *             "output_step4" entries
     */
    public static void run(Map<String, String> path) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        System.setProperty("HADOOP_USER_NAME", "root");
        // Ship the locally built fat jar so the cluster can run our classes.
        conf.set("mapreduce.job.jar", "D:\\projects\\hadoop-study\\target\\hadoop-study-1.0-SNAPSHOT-jar-with-dependencies.jar");
        conf.set("mapreduce.app-submission.cross-platform", "true"); // cross-platform job submission

        String input1 = path.get("input1_step4");
        String input2 = path.get("input2_step4");
        String output = path.get("output_step4");

        Job job = Job.getInstance(conf, "UserCF_Step4 job");
        job.setJarByClass(UserCF_Step4.class);
        job.setMapperClass(Step4Mapper.class);
        job.setReducerClass(Step4Reducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // Both datasets feed the same mapper, which tells them apart by directory name.
        FileInputFormat.setInputPaths(job, new Path(input1), new Path(input2));
        FileOutputFormat.setOutputPath(job, new Path(output));

        System.out.println("input1: " + input1);
        System.out.println("input2: " + input2);
        System.out.println("output: " + output);

        boolean completed = job.waitForCompletion(true);
        if (completed) {
            System.out.println("UserCF_Step4 run successfully!");
        } else {
            System.out.println("UserCF_Step4 run stop!");
        }
    }

}
