package als

import org.apache.log4j.{Level, Logger}
import org.apache.spark.ml.evaluation.RegressionEvaluator
import org.apache.spark.ml.recommendation.ALS
import org.apache.spark.sql.{Row, SparkSession}

/**
  * Created by hunter.coder (涛哥)
  * 2019/5/6 16:29
  * Contact QQ: 657270652
  * Version: 1.0
  * More learning material: https://blog.csdn.net/coderblack/
  * Description: collaborative filtering — ALS model training
  **/
object ALSTrainner {

  /**
    * Trains a collaborative-filtering ALS model on a user-item score matrix,
    * reports the RMSE of the model on that matrix, and writes the top-2
    * recommendations per user as JSON.
    *
    * @param args optional: args(0) = input parquet path of the user-item matrix,
    *             args(1) = output path for the JSON recommendation result.
    *             Defaults to the original hard-coded local paths when absent,
    *             so existing invocations keep working unchanged.
    */
  def main(args: Array[String]): Unit = {

    Logger.getLogger("org").setLevel(Level.WARN)

    // Paths are parameterized via args but fall back to the previous
    // hard-coded values for backward compatibility.
    val inputPath  = if (args.length > 0) args(0) else "G:\\testdata\\comment\\uimatrix"
    val outputPath = if (args.length > 1) args(1) else "G:\\testdata\\comment\\als_rec_result"

    val spark = SparkSession.builder().appName("bayes_dp").master("local").getOrCreate()

    import spark.implicits._

    try {
      // Load the user-item (UI) matrix. ALS needs numeric user/item columns,
      // so the "p"-prefixed product id (e.g. "p03") is stripped to its number.
      // NOTE(review): the Row match is partial — any row not matching
      // (String, String, Double) will throw a MatchError; confirm the parquet schema.
      val ui = spark.read.parquet(inputPath)
        .map { case Row(gid: String, pid: String, score: Double) =>
          (gid.toDouble, pid.substring(1).toDouble, score)
        }.toDF("gid", "pid", "score")
      ui.show(10, false)

      /**
        * +---+---+-----+
        * |gid|pid|score|
        * +---+---+-----+
        * |1  |p03|2.0  |
        * |3  |p05|3.0  |
        * |1  |p02|-2.0 |
        * |2  |p03|3.0  |
        * |1  |p01|7.0  |
        * |2  |p02|4.0  |
        * |2  |p01|4.0  |
        * |3  |p02|1.0  |
        * |3  |p04|2.0  |
        */

      // Build the ALS trainer. coldStartStrategy("drop") removes rows whose
      // prediction is NaN (unseen users/items) so the RMSE below cannot be NaN.
      val als = new ALS()
        .setUserCol("gid")
        .setItemCol("pid")
        .setRatingCol("score")
        .setMaxIter(10)
        .setRegParam(0.01)
        .setColdStartStrategy("drop")

      // Train the model and predict on the same data.
      // NOTE(review): evaluating on the training set overstates accuracy;
      // consider randomSplit into train/test for an honest RMSE.
      val model = als.fit(ui)
      val ui2 = model.transform(ui)
      ui2.show(10, false)

      // Root-mean-square error of the predictions against the actual scores.
      val evaluator = new RegressionEvaluator()
        .setLabelCol("score")
        .setPredictionCol("prediction")
        .setMetricName("rmse")
      val rmse = evaluator.evaluate(ui2)
      println(s"均方差为： $rmse")

      // Produce the top-2 item recommendations for every user.
      val recDF = model.recommendForAllUsers(2)
      recDF.show(10, false)

      /**
        * 均方差为： 0.006444557544368801
        * +---+------------------------------+
        * |gid|recommendations               |
        * +---+------------------------------+
        * |1  |[[1,6.997702], [3,1.9916729]] |
        * |3  |[[5,2.9920387], [4,1.9946926]]|
        * |2  |[[1,3.9933927], [2,3.9902647]]|
        * +---+------------------------------+
        */

      recDF.write.json(outputPath)
    } finally {
      // Always release the SparkSession, even when training or I/O fails.
      spark.stop()
    }
  }
}
