package com.kili.mirco.spark_als.spark;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.mllib.recommendation.ALS;
import org.apache.spark.mllib.recommendation.MatrixFactorizationModel;
import org.apache.spark.mllib.recommendation.Rating;
import scala.Tuple2;

/**
 * Evaluates an ALS collaborative-filtering model by computing the mean
 * squared error between observed ratings and model predictions.
 *
 * @date 19-6-1
 * @author jackliang
 */
public class SparkMeanVarianceEvaluator {

    /** Default location of the ratings input file. */
    private static final String DEFAULT_INPUT_PATH = "/opt/recommendations.txt";

    /**
     * Trains an ALS model on the default ratings file and prints its
     * mean squared error; the smaller the error, the better the fit.
     *
     * <p>Backward-compatible entry point: delegates to
     * {@link #meanVarianceCalculation(String)} with the default path.
     */
    public static void meanVarianceCalculation() {
        meanVarianceCalculation(DEFAULT_INPUT_PATH);
    }

    /**
     * Trains an ALS matrix-factorization model on the ratings found at
     * {@code inputPath}, predicts a rating for every observed
     * (user, product) pair, and prints the mean squared error between
     * the actual and predicted ratings to stdout.
     *
     * @param inputPath path (local or HDFS) to the ratings text file;
     *                  each line is parsed by {@code ParseRating}
     */
    public static void meanVarianceCalculation(String inputPath) {

        SparkConf sparkConf = new SparkConf()
                .setMaster("local[*]")
                .setAppName("JavaMeanVarianceEvaluate");

        int rank = 10;       // number of latent factors
        int iterations = 10; // ALS training iterations
        int blocks = -1;     // -1 lets Spark auto-configure the block count

        // JavaSparkContext is Closeable; try-with-resources guarantees the
        // context is shut down even if training or evaluation throws.
        // (The original only closed it on the happy path.)
        try (JavaSparkContext sc = new JavaSparkContext(sparkConf)) {
            JavaRDD<String> lines = sc.textFile(inputPath);

            JavaRDD<Rating> ratings = lines.map(new ParseRating());

            // lambda = 0.01 is the ALS regularization parameter.
            MatrixFactorizationModel model =
                    ALS.train(
                            ratings.rdd(),
                            rank,
                            iterations,
                            0.01,
                            blocks);

            // Every (user, product) pair we have an actual rating for.
            JavaRDD<Tuple2<Object, Object>> userProducts =
                    ratings.map(r -> new Tuple2<>(r.user(), r.product()));

            // Predicted rating keyed by (user, product).
            JavaPairRDD<Tuple2<Integer, Integer>, Double> predictions = JavaPairRDD.fromJavaRDD(
                    model.predict(JavaRDD.toRDD(userProducts)).toJavaRDD()
                            .map(r -> new Tuple2<>(new Tuple2<>(r.user(), r.product()), r.rating()))
            );

            // Join actual with predicted on the (user, product) key, keeping
            // only the (actual, predicted) value pairs.
            JavaRDD<Tuple2<Double, Double>> ratesAndPreds = JavaPairRDD.fromJavaRDD(
                    ratings.map(r -> new Tuple2<>(new Tuple2<>(r.user(), r.product()), r.rating())))
                    .join(predictions).values();

            // Mean of squared (actual - predicted) errors.
            double mse = ratesAndPreds.mapToDouble(pair -> {
                double err = pair._1() - pair._2();
                return err * err;
            }).mean();

            System.out.println("spark Mean Squared Error = " + mse);
        }
    }
}
