package com.xjdx;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.mllib.recommendation.ALS;
import org.apache.spark.mllib.recommendation.MatrixFactorizationModel;
import org.apache.spark.mllib.recommendation.Rating;
import org.apache.spark.rdd.RDD;
import scala.Tuple2;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.*;

/**
 * ALS-based product recommender. Pulls rating/product/user data from HDFS,
 * trains a {@link MatrixFactorizationModel}, writes one "recommendN" file of
 * recommendations per user, and uploads each file back to HDFS.
 */
public class recommender {
    static {
        // Act as this HDFS user regardless of the local OS account.
        System.setProperty("HADOOP_USER_NAME", "masterlb");
    }

    /** Number of products recommended per user. */
    private static final int RECOMMEND_COUNT = 8;

    private final JavaSparkContext sc;

    /**
     * @param sc shared Spark context used to build RDDs; not owned (caller stops it)
     */
    public recommender(JavaSparkContext sc) {
        this.sc = sc;
    }

    public static void main(String[] args) throws IOException {
        Configuration conf1 = new Configuration();
        conf1.set("fs.defaultFS","hdfs://hadoopmaster:9000");
        FileSystem fs = FileSystem.get(conf1);

        SparkConf conf = new SparkConf().setAppName("product").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Refresh the local working copy of the input data before downloading.
        File fileteam6data = new File("team6data");
        if (fileteam6data.exists()) {
            System.out.println(delFile(fileteam6data));
        }
        try {
            fs.copyToLocalFile(false, new Path("/team6data"), new Path("./team6data"), true);
        } catch (IOException e) {
            // Best-effort: fall through and use whatever local data already exists.
            e.printStackTrace();
        }

        JavaRDD<String> rawRatings = sc.textFile("team6data/u.rating");
        JavaRDD<String> rawMovies = sc.textFile("team6data/u.product");
        JavaRDD<String> rawUsers = sc.textFile("team6data/u.user");

        recommender recommender = new recommender(sc);

        Map<Integer, String> movieAndName = recommender.buildMovie(rawMovies);
        // Broadcast product-id -> name so executors can resolve names locally.
        Broadcast<Map<Integer, String>> bmovieAndName = sc.broadcast(movieAndName);
        Map<Integer, String> userAndName = buildUser(rawUsers);
        Broadcast<Map<Integer, String>> bUserAndName = sc.broadcast(userAndName);

        // Train the ALS model on the full rating set.
        MatrixFactorizationModel model = recommender.model(rawRatings);

        List<Integer> userIds = recommender.bbb(rawUsers);
        for (int userId : userIds) {
            File file = new File("recommend" + userId);
            // try-with-resources: the writer was previously leaked when
            // recommendProducts() or write() threw before close().
            try (Writer out = new FileWriter(file)) {
                // Recommend RECOMMEND_COUNT products for this user.
                Rating[] recomendedProduct = model.recommendProducts(userId, RECOMMEND_COUNT);
                System.out.println("给用户推荐的商品： ");
                for (Rating reRating : recomendedProduct) {
                    System.out.println(reRating.user() + "," + reRating.product() + ","
                            + bmovieAndName.getValue()
                                    .getOrDefault(reRating.product(), String.valueOf(reRating.product())));
                    String data = reRating.user() + "," + reRating.product() + "\n";
                    out.write(data);
                }
            }
            fs.copyFromLocalFile(false, new Path(file.getName()), new Path("/user/masterlb/dataoutput/"));
        }

        // Release Spark and HDFS resources explicitly.
        sc.stop();
        fs.close();
    }

    /**
     * Recursively deletes a file or directory tree.
     *
     * @return {@code false} if the path does not exist, otherwise the result of
     *         deleting the (now-emptied) path itself
     */
    static boolean delFile(File file) {
        if (!file.exists()) {
            return false;
        }
        if (file.isDirectory()) {
            File[] files = file.listFiles();
            if (files != null) { // listFiles() returns null on I/O error — guard against NPE
                for (File f : files) {
                    delFile(f);
                }
            }
        }
        return file.delete();
    }

    /** Debug helper: prints the first 5 raw rating lines. */
    public void preparation(JavaRDD<String> rawRatings, JavaRDD<String> rawMovies, JavaRDD<String> rawUsers) {
        rawRatings.take(5).forEach(x -> System.out.println(x));
    }

    /**
     * Trains an ALS matrix-factorization model on the raw rating lines.
     * Parameters: rank 50, 10 iterations, lambda 0.01 (regularization).
     */
    public MatrixFactorizationModel model(JavaRDD<String> rawRatings) {
        JavaRDD<Rating> ratings = buildRating(rawRatings);
        // ALS.train wants the Scala RDD, hence the .rdd() conversion.
        return ALS.train(ratings.rdd(), 50, 10, 0.01);
    }

    /**
     * Parses pipe-delimited "user|product|rating" lines into {@link Rating}s.
     * The rating is parsed as a double (Rating stores a double), so both
     * integral and fractional rating values are accepted.
     */
    public JavaRDD<Rating> buildRating(JavaRDD<String> rawRatings) {
        return rawRatings.map(line -> {
            String[] splits = line.split("\\|");
            int userId = Integer.parseInt(splits[0]);
            int movieId = Integer.parseInt(splits[1]);
            // Was Integer.parseInt — crashed on fractional ratings like "4.5".
            double rating = Double.parseDouble(splits[2]);
            return new Rating(userId, movieId, rating);
        });
    }

    /** Extracts the user id (first pipe-delimited field) from every user line. */
    public List<Integer> bbb(JavaRDD<String> rawUsers) {
        return rawUsers.map(line -> {
            String[] splits = line.split("\\|");
            return Integer.parseInt(splits[0]);
        }).collect();
    }

    /** Builds a product-id -> product-name map from pipe-delimited product lines. */
    public Map<Integer, String> buildMovie(JavaRDD<String> rawMovies) {
        return rawMovies.mapToPair(line -> {
            String[] splits = line.split("\\|");
            Integer movieId = Integer.parseInt(splits[0]);
            // Field [2] is the product name — return (id, name).
            return new Tuple2<>(movieId, splits[2]);
        }).collectAsMap();
    }

    /**
     * Builds a user-id -> name map from pipe-delimited user lines, skipping
     * malformed lines with fewer than two fields.
     */
    public static Map<Integer, String> buildUser(JavaRDD<String> rawUsers) {
        return rawUsers.flatMapToPair(line -> {
            String[] tokens = line.split("\\|");
            if (tokens.length > 1) {
                return Arrays.asList(new Tuple2<>(Integer.valueOf(tokens[0]), tokens[2])).iterator();
            } else {
                return Collections.<Tuple2<Integer, String>>emptyList().iterator();
            }
        }).collectAsMap();
    }

}

