package com.njbdqn.call

import com.njbdqn.datahandler.ALSDataHandler
import com.njbdqn.util.HDFSConnection
import org.apache.spark.mllib.recommendation.{ALS, Rating}
import org.apache.spark.sql.SparkSession

object ALSCall {
  /**
   * Trains an explicit-feedback ALS model on the (uid, gid, score) data
   * produced by [[ALSDataHandler.alsData]], computes the top-N product
   * recommendations per user, maps the surrogate keys back to business
   * ids and writes the result to HDFS under /myshops/dwd_als.
   *
   * @param spark the active SparkSession
   * @param topN  number of products recommended per user
   *              (default 30, preserving the original behavior)
   */
  def call(spark: SparkSession, topN: Int = 30) = {
    // Cached because the frame feeds the training RDD; released in `finally`.
    val df = ALSDataHandler.alsData(spark).cache()
    try {
      // Rows arrive untyped; go through toString so both numeric and
      // string-typed columns parse. NOTE(review): assumes uid/gid fit in Int
      // and score parses as Float — confirm against the upstream schema.
      val ratings = df.rdd.map { row =>
        Rating(
          row.getAs("uid").toString.toInt,
          row.getAs("gid").toString.toInt,
          row.getAs("score").toString.toFloat)
      }

      // Explicit-preference ALS; rank/iterations/lambda keep the original tuning.
      val model = new ALS()
        .setRank(10)
        .setIterations(20)
        .setLambda(0.01)
        .setImplicitPrefs(false)
        .run(ratings)

      val recommendations = model.recommendProductsForUsers(topN)

      import spark.implicits._
      // Flatten (user -> Array[Rating]) pairs into (uid, gid, score) rows.
      val frame = recommendations.flatMap { case (user, recs) =>
        recs.map(r => (user, r.product, r.rating))
      }.toDF("uid", "gid", "score")

      // Translate the numeric uid/gid surrogate keys back to cust_id/good_id.
      val userTab = ALSDataHandler.userToNum(spark)
      val goodTab = ALSDataHandler.goodToNum(spark)
      val result = frame
        .join(userTab, Seq("uid"), "inner")
        .join(goodTab, Seq("gid"), "inner")
        .drop("uid", "gid")

      HDFSConnection.writeDataToHDFS("/myshops/dwd_als", result)
    } finally {
      // Fix: the original cached `df` but never released it, pinning the
      // data in executor memory for the lifetime of the Spark context.
      df.unpersist()
    }
  }
}
