package com.tanhua.spark.mongo;

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.mllib.recommendation.MatrixFactorizationModel;
import org.apache.spark.mllib.recommendation.Rating;
import org.bson.Document;
import redis.clients.jedis.HostAndPort;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisCluster;
import scala.Tuple2;

import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLOutput;
import java.util.*;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * Spark batch job: loads publish-score documents from MongoDB, sums the scores
 * per (userId, publishId) pair, trains the best ALS recommendation model via
 * {@code MLlibRecommend}, and emits the top-20 recommended publish ids for every
 * distinct user.
 *
 * <p>NOTE(review): despite the name of {@link #saveToRedis}, nothing is written
 * to Redis yet — recommendations are only printed to stdout.
 */
public class SparkQunaZiBak1 {
    // MongoDB connection settings — consider externalizing to configuration.
    private static final String MONGODB_HOST = "192.168.136.160";
    private static final String MONGODB_PORT = "27017";
    private static final String MONGODB_DATABASE = "tanhua";
    private static final String MONGODB_COLLECTION = "publish_score";

    public static void main(String[] args) throws Exception {
        execute();
    }

    /**
     * Runs the full pipeline: load documents from MongoDB, aggregate scores per
     * (userId, publishId), train the best ALS model, and emit recommendations
     * for every distinct user.
     *
     * @throws Exception propagated from model training ({@code MLlibRecommend.bestModel})
     */
    public static void execute() throws Exception {
        String mongourl = "mongodb://" + MONGODB_HOST + ":" + MONGODB_PORT + "/"
                + MONGODB_DATABASE + "." + MONGODB_COLLECTION + "?readPreference=primaryPreferred";
        System.out.println("mongourl=" + mongourl);

        SparkConf sparkConf = new SparkConf()
                .setAppName("SparkQunaZi2")
                .setMaster("local[*]")
                .set("spark.driver.host", "localhost")
                .set("spark.mongodb.input.uri", mongourl);

        JavaSparkContext jsc = new JavaSparkContext(sparkConf);
        // try/finally so the context is released even when a stage throws
        // (the original leaked it on failure).
        try {
            JavaMongoRDD<Document> mongoRDD = MongoSpark.load(jsc);

            // Drop documents without a publishId — they cannot contribute a rating.
            JavaRDD<Document> rdd = mongoRDD.filter(document -> document.getLong("publishId") != null);

            // Sum all scores recorded for the same (userId, publishId) pair;
            // the first document of each group is reused as the accumulator.
            JavaRDD<Document> values = rdd.mapToPair(document -> {
                Long userId = document.getLong("userId");
                Long publishId = document.getLong("publishId");
                return new Tuple2<>(userId + "_" + publishId, document);
            }).reduceByKey((v1, v2) -> {
                double newScore = v1.getDouble("score") + v2.getDouble("score");
                v1.put("score", newScore);
                return v1;
            }).values();

            // Every user we must produce recommendations for.
            List<Long> userIdList = rdd.map(v1 -> v1.getLong("userId")).distinct().collect();

            // Key each rating by date % 10 so the model selector can bucket the data.
            // NOTE(review): assumes "date" is an epoch-like Long present on every
            // document — confirm against the upstream writer.
            JavaPairRDD<Long, Rating> ratings = values.mapToPair(document -> {
                Long date = document.getLong("date");
                int userId = document.getLong("userId").intValue();
                int publishId = document.getLong("publishId").intValue();
                Double score = document.getDouble("score");
                return new Tuple2<>(date % 10L, new Rating(userId, publishId, score));
            });

            MatrixFactorizationModel bestModel = new MLlibRecommend().bestModel(ratings);
            saveToRedis(userIdList, bestModel);
        } finally {
            jsc.close();
        }
    }

    /**
     * Computes the top-20 recommended publish ids for each user and prints the
     * comma-joined id list to stdout.
     *
     * <p>TODO(review): no Redis write is performed — the original declared (and
     * never used) Jedis handles and only printed {@code value}. Wire up a real
     * Redis client and store {@code value} under {@code key}.
     *
     * @param userIdList distinct user ids to produce recommendations for
     * @param bestModel  trained ALS model used to rank publish ids
     */
    private static void saveToRedis(List<Long> userIdList, MatrixFactorizationModel bestModel) {
        for (Long userId : userIdList) {
            Rating[] recommendProducts = bestModel.recommendProducts(userId.intValue(), 20);
            List<Integer> products = new ArrayList<>();
            for (Rating product : recommendProducts) {
                products.add(product.product());
            }
            // Intended Redis key scheme — currently unused (see TODO above).
            String key = "QUANZI_PUBLISH_RECOMMEND_" + userId;
            String value = StringUtils.join(products, ',');
            System.out.println(value);
        }
    }
}
