package com.tanhua.spark.mongo;

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.mllib.recommendation.MatrixFactorizationModel;
import org.apache.spark.mllib.recommendation.Rating;
import org.bson.Document;
import redis.clients.jedis.HostAndPort;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisCluster;
import scala.Tuple2;

import java.io.InputStream;
import java.util.*;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class SparkVideo {

    /** MongoDB host:port; overridable via the MONGODB_HOST environment variable. */
    private static final String MONGODB_HOST =
            System.getenv("MONGODB_HOST") == null ? "192.168.136.160:27017" : System.getenv("MONGODB_HOST");

    /** Redis host:port; overridable via the REDIS_NODES environment variable. */
    private static final String REDIS_NODES =
            System.getenv("REDIS_NODES") == null ? "192.168.136.160:6379" : System.getenv("REDIS_NODES");

    /** Interval, in minutes, between scheduled recommendation runs. */
    private static final long SCHEDULE_PERIOD = 2L;

    public SparkVideo() {

    }

    /**
     * Runs {@link #execute()} immediately and then repeatedly every
     * {@value #SCHEDULE_PERIOD} minutes on a single-threaded scheduler.
     * Exceptions thrown by a run are caught and printed so that one failed
     * run does not cancel subsequent executions ({@code scheduleAtFixedRate}
     * permanently stops on an uncaught throwable).
     */
    public static void executeRecommend() {
        Runnable task = () -> {
            try {
                execute();
            } catch (Exception e) {
                // Deliberate catch-all: an uncaught exception would silently
                // cancel every future scheduled run.
                e.printStackTrace();
            }
        };
        ScheduledExecutorService service = Executors.newSingleThreadScheduledExecutor();
        service.scheduleAtFixedRate(task, 0L, SCHEDULE_PERIOD, TimeUnit.MINUTES);
    }

    /**
     * Loads video scores from MongoDB ({@code tanhua.video_score}), merges
     * duplicate (userId, videoId) ratings by summing their scores, and writes
     * a comma-separated recommendation list per user to Redis under the key
     * {@code QUANZI_VIDEO_RECOMMEND_<userId>}.
     *
     * @throws Exception if the Spark job or the Redis write fails
     */
    public static void execute() throws Exception {

        String mongoUrl = "mongodb://" + MONGODB_HOST + "/tanhua.video_score?readPreference=primaryPreferred";

        SparkConf sparkConf = new SparkConf()
                .setAppName("SparkVideo")
                .setMaster("local[*]")
                .set("spark.driver.host", "localhost")
                .set("spark.mongodb.input.uri", mongoUrl);

        JavaSparkContext jsc = new JavaSparkContext(sparkConf);
        try {
            // Load the raw score documents from MongoDB.
            JavaMongoRDD<Document> rdd = MongoSpark.load(jsc);

            // A user may have rated the same video several times; sum those
            // scores so each (userId, videoId) pair appears exactly once.
            JavaRDD<Document> values = rdd.mapToPair(document -> {
                Integer user = document.getLong("userId").intValue();
                Integer product = document.getLong("videoId").intValue();
                return new Tuple2<>(user + "_" + product, document);
            }).reduceByKey((v1, v2) -> {
                Double score = v1.getDouble("score") + v2.getDouble("score");
                v1.put("score", score);
                return v1;
            }).values();

            // Every distinct user present in the score data.
            List<Long> userIdList = rdd.map(v1 -> v1.getLong("userId")).distinct().collect();

            // Hoisted out of the per-user loop: the original re-ran this Spark
            // action (a full job) once per user while always getting the same rows.
            List<Document> candidates = values.take(10);

            String[] hostAndPort = REDIS_NODES.split(":");
            String host = hostAndPort[0];
            int port = Integer.parseInt(hostAndPort[1]);

            // try-with-resources: the original leaked the connection when an
            // exception was thrown before the explicit close().
            try (Jedis jedis = new Jedis(host, port)) {
                for (Long userId : userIdList) {
                    List<Long> products = new ArrayList<>();
                    for (Document document : candidates) {
                        Long uid = document.getLong("userId");
                        // NOTE(review): the dedup pipeline keys on "videoId" but the
                        // recommendation list is built from "publishId" — confirm
                        // these documents actually carry a publishId field.
                        Long publishId = document.getLong("publishId");
                        // Bug fix: the original compared boxed Longs with !=
                        // (reference equality), so outside the -128..127 cache the
                        // filter never matched and users could be recommended
                        // their own videos.
                        if (!Objects.equals(userId, uid)) {
                            products.add(publishId);
                        }
                    }
                    String key = "QUANZI_VIDEO_RECOMMEND_" + userId;
                    String value = StringUtils.join(products, ',');
                    System.out.println("key=" + userId + ",value = " + value);
                    jedis.set(key, value);
                }
            }
        } finally {
            // Always release the Spark context, even when the job fails.
            jsc.close();
        }
    }
}
