package cn.darksoul3.spark.als

import org.apache.spark.ml.classification.LogisticRegression
import org.apache.spark.ml.evaluation.{MulticlassClassificationEvaluator, RegressionEvaluator}
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import org.apache.spark.ml.recommendation.{ALS, ALSModel}
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema

import java.util

class ALSTest {

  // NOTE(review): a Spark entry point must live in an `object` (e.g.
  // `object ALSTest`) for `spark-submit`/the JVM to find `main`; a method on a
  // class instance is never invoked. Kept as a class to preserve the existing
  // interface — confirm how this is launched.

  /**
   * End-to-end demo: trains an ALS recommender on user/shop ratings read from
   * behavior.csv, evaluates it with RMSE, produces top-20 recommendations for
   * 5 users, then trains and evaluates a multinomial logistic regression on
   * the same split.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {

    val ss: SparkSession = SparkSession.builder().master("local").appName("als").getOrCreate()
    val lines: Dataset[String] = ss.read.textFile("behavior.csv")

    import ss.implicits._
    // One RatingData per CSV line: userId,shopId,rating (quotes stripped).
    val rating: Dataset[RatingData] = lines.map(parseRating)

    // 80/20 train/test split.
    val Array(training, test) = rating.randomSplit(Array(0.8, 0.2))

    val als = new ALS()
      .setMaxIter(10)    // number of iterations
      .setRank(5)        // number of latent features
      .setRegParam(0.01) // regularization, guards against overfitting
      .setUserCol("userId")
      .setItemCol("shopId")
      .setRatingCol("rating")
      // BUG FIX: users/items present only in the test split get NaN
      // predictions; without dropping them the RMSE below is NaN.
      .setColdStartStrategy("drop")

    // training
    val aLSModel: ALSModel = als.fit(training)
    // TODO(review): save path is empty — this throws at runtime; supply a real path.
    aLSModel.save("")

    val predictions: DataFrame = aLSModel.transform(test)

    val rmse = new RegressionEvaluator()
      .setMetricName("rmse")
      .setLabelCol("rating")
      .setPredictionCol("prediction")
      .evaluate(predictions)
    // BUG FIX: rmse was computed but never reported.
    println(s"ALS RMSE = $rmse")

    // predict: top-20 recommendations for 5 distinct users
    val users: Dataset[Row] = rating.select(aLSModel.getUserCol).distinct().limit(5)
    val userRecs: DataFrame = aLSModel.recommendForUserSubset(users, 20)

    // Explicit Iterator[Row] type disambiguates the Scala overload of
    // foreachPartition from the Java ForeachPartitionFunction one.
    userRecs.foreachPartition((it: Iterator[Row]) => {
      // insert database
      it.foreach(r => {
        val userId: Int = r.getInt(0)
        // BUG FIX: the original did `shopIds :+ shopId` and discarded the
        // result (List is immutable), so the list was always empty. getSeq
        // also avoids depending on the internal GenericRowWithSchema class;
        // each element is a (shopId, rating) struct Row.
        val shopIds: Seq[Int] = r.getSeq[Row](1).map(_.getInt(0))
        // TODO(review): userId/shopIds are never persisted — wire up the
        // database insert this block was stubbed out for.
      })
    })

    // LR
    // NOTE(review): LogisticRegression expects "features" (Vector) and "label"
    // columns, but `training` only has userId/shopId/rating — this fit fails
    // at runtime unless a feature-assembly stage is added upstream; the
    // evaluator below likewise defaults to labelCol = "label". Confirm intent.
    val lr: LogisticRegression = new LogisticRegression()
      .setMaxIter(10)
      .setRegParam(0.3)
      .setElasticNetParam(0.8)
      .setFamily("multinomial")

    val model = lr.fit(training)
    // TODO(review): empty save path — supply a real path.
    model.save("")

    val lrPredictions: DataFrame = model.transform(test)
    val evaluator = new MulticlassClassificationEvaluator()
    val accuracy = evaluator.setMetricName("accuracy").evaluate(lrPredictions)

    println(accuracy)

    ss.stop()
  }

  /**
   * Parses one CSV line of the form `"userId","shopId","rating"` (quotes
   * optional) into a [[RatingData]].
   *
   * @param line raw CSV line
   * @return the parsed rating record
   * @throws NumberFormatException if any field is not an integer
   * @throws ArrayIndexOutOfBoundsException if the line has fewer than 3 fields
   */
  def parseRating(line: String): RatingData = {
    val rating: Array[String] = line.replaceAll("\"", "").split(",")
    RatingData(rating(0).toInt, rating(1).toInt, rating(2).toInt)
  }

  // BUG FIX: field renamed movieId -> shopId so the Dataset's encoded column
  // name matches setItemCol("shopId") above; previously als.fit failed with
  // "cannot resolve column name shopId".
  case class RatingData(userId: Int, shopId: Int, rating: Int)

}
