//
// Source code recreated from a .class file by IntelliJ IDEA
// (powered by Fernflower decompiler)
//

package com.tanhua.spark.mongo;

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.bson.Document;
import redis.clients.jedis.Jedis;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * Periodic Spark batch job that aggregates "quanzi" (circle) publication scores
 * from MongoDB and writes per-user publication recommendations to Redis.
 *
 * <p>Connection targets default to a local dev host and can be overridden via the
 * {@code MONGODB_HOST} and {@code REDIS_NODES} environment variables.
 */
public class SparkQunaZi {

    private static final String MONGODB_HOST =
            System.getenv("MONGODB_HOST") == null ? "192.168.136.160:27017" : System.getenv("MONGODB_HOST");
    private static final String REDIS_NODES =
            System.getenv("REDIS_NODES") == null ? "192.168.136.160:6379" : System.getenv("REDIS_NODES");
    /** Recompute interval, in minutes. */
    private static final long SCHEDULE_PERIOD = 2L;

    public SparkQunaZi() {
    }

    /**
     * Entry point: runs {@link #execute()} immediately and then every
     * {@value #SCHEDULE_PERIOD} minutes, then kicks off the video recommendation job.
     */
    public static void main(String[] args) {
        ScheduledExecutorService service = Executors.newSingleThreadScheduledExecutor();
        service.scheduleAtFixedRate(() -> {
            try {
                execute();
            } catch (Exception e) {
                // Swallow after logging: an exception escaping the task would
                // silently cancel all further scheduled runs.
                e.printStackTrace();
            }
        }, 0L, SCHEDULE_PERIOD, TimeUnit.MINUTES);

        SparkVideo.executeRecommend();
    }

    /**
     * Sums scores per (userId, publishId) pair from the {@code tanhua.publish_score}
     * collection, then for every known user stores a comma-separated list of
     * recommended publish ids (top candidates not authored by that user) in Redis
     * under the key {@code QUANZI_PUBLISH_RECOMMEND_<userId>}.
     *
     * @throws Exception if the Spark job or the Redis write fails
     */
    public static void execute() throws Exception {
        String mongoUrl = "mongodb://" + MONGODB_HOST + "/tanhua.publish_score?readPreference=primaryPreferred";
        System.out.println("MONGODB_HOST=" + mongoUrl);
        System.out.println("REDIS_NODES=" + REDIS_NODES);

        SparkConf sparkConf = new SparkConf()
                .setAppName("SparkQunaZi2")
                .setMaster("local[*]")
                .set("spark.driver.host", "localhost")
                .set("spark.mongodb.input.uri", mongoUrl);

        JavaSparkContext jsc = new JavaSparkContext(sparkConf);
        Jedis jedis = null;
        try {
            JavaMongoRDD<Document> mongoRDD = MongoSpark.load(jsc);

            // Documents without a publishId cannot be recommended - drop them up front.
            JavaRDD<Document> rdd = mongoRDD.filter(document -> document.getLong("publishId") != null);

            // Sum scores per (userId, publishId) pair, keeping one document per pair.
            JavaRDD<Document> values = rdd.mapToPair(document -> {
                Long userId = document.getLong("userId");
                Long publishId = document.getLong("publishId");
                return new Tuple2<>(userId + "_" + publishId, document);
            }).reduceByKey((v1, v2) -> {
                v1.put("score", v1.getDouble("score") + v2.getDouble("score"));
                return v1;
            }).values();

            List<Long> userIdList = rdd.map(v1 -> v1.getLong("userId")).distinct().collect();

            // Fetch the candidate documents once. The previous version re-ran this
            // Spark action inside the per-user loop with an identical result,
            // triggering one redundant job per user.
            List<Document> candidates = values.take(10);

            String[] node = REDIS_NODES.split(":");
            jedis = new Jedis(node[0], Integer.parseInt(node[1]));

            for (Long userId : userIdList) {
                List<Long> products = new ArrayList<>();
                for (Document document : candidates) {
                    // Value comparison is required here: the old code used != on
                    // boxed Longs (reference comparison), so the "skip the user's
                    // own publications" filter effectively never fired.
                    if (!Objects.equals(userId, document.getLong("userId"))) {
                        products.add(document.getLong("publishId"));
                    }
                }
                String key = "QUANZI_PUBLISH_RECOMMEND_" + userId;
                String value = StringUtils.join(products, ',');
                System.out.println("key=" + userId + ",value = " + value);
                jedis.set(key, value);
            }
        } finally {
            // Release external resources even when the Spark job throws.
            if (jedis != null) {
                jedis.close();
            }
            jsc.close();
        }
    }
}
