import java.sql.Date
import java.text.SimpleDateFormat
import java.time.{Instant, ZoneId}
import java.time.format.DateTimeFormatter

import com.mongodb.spark.MongoSpark
import com.mongodb.spark.config.ReadConfig
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * Project: ECommerceRecommendSystem
  * Package: offline statistics recommendation module
  * Description: computes rating-based product statistics (historical popularity,
  * recent popularity per month, average score) and stores them in MongoDB.
  *
  * Created by ZhouPeng on 2021/12/24 11:35
  **/
/**
  * Offline statistics job: reads the ratings collection from MongoDB and
  * materializes three statistics tables back into MongoDB:
  * historical popularity, recent (per-month) popularity, and average score.
  */
object StatisticsRecommender {

  val MONGODB_RATING_COLLECTION = "Rating"
  // Target MongoDB collection names for the computed statistics
  val RATE_MORE_PRODUCTS = "RateMoreProducts"
  val RATE_MORE_RECENTLY_PRODUCTS = "RateMoreRecentlyProducts"
  val AVERAGE_PRODUCTS = "AverageProducts"

  def main(args: Array[String]): Unit = {

    // Basic configuration: Spark master and MongoDB connection settings
    val config = Map(
      "spark.cores" -> "local[*]",
      "mongo.uri" -> "mongodb://master:27017/recommender",
      "mongo.db" -> "recommender"
    )

    val sparkConf = new SparkConf().setMaster(config("spark.cores")).setAppName("StatisticsRecommender")
    val sparkSession = SparkSession.builder().config(sparkConf).getOrCreate()

    import sparkSession.implicits._
    implicit val mongoConfig = MongoConfig(config("mongo.uri"), config("mongo.db"))

    // Reading the ratings collection through the DataFrame reader hits a Mongo
    // schema-inference issue; kept for reference until resolved:
   /* val ratingDF = sparkSession.read
      .option("uri", mongoConfig.mongoUri)
      .option("collection", MONGODB_RATING_COLLECTION)
      .format("com.mongodb.spark.sql").load()
      .as[Rating]
      .toDF()*/

    val readConfig = ReadConfig(Map(
      "uri" -> mongoConfig.mongoUri,
      "collection" -> MONGODB_RATING_COLLECTION
    ))

    // Workaround: load raw BSON documents via the RDD API and map them
    // explicitly into Rating rows.
    val ratingDF = MongoSpark.load(sparkSession.sparkContext, readConfig)
      .map(
        document => {
          Rating(document.getInteger("userId"), document.getInteger("productId"),
            document.getDouble("score"), document.getLong("timestamp"))
        }
      ).toDF()

    // Register a temp view so the statistics below can be expressed in SQL
    ratingDF.createOrReplaceTempView("ratings")

    // 1. Historically popular products: ranked by total number of ratings
    val rateMoreProductsDF = sparkSession.sql(
      """
        |select productId,count(productId) as count
        |from ratings
        |group by productId order by count desc
      """.stripMargin)
    storeDF2Mongo(rateMoreProductsDF, RATE_MORE_PRODUCTS)

    // 2. Recently popular products: bucket the epoch-second timestamp into a
    // yyyyMM month and count ratings per product per month.
    // NOTE: DateTimeFormatter is immutable and thread-safe, unlike the
    // SimpleDateFormat it replaces, so sharing one instance across parallel
    // UDF invocations is safe. Same default time zone as before.
    val monthFormatter = DateTimeFormatter.ofPattern("yyyyMM").withZone(ZoneId.systemDefault())
    sparkSession.udf.register("changeDate", (timestamp: Long) => {
      monthFormatter.format(Instant.ofEpochSecond(timestamp)).toLong
    })
    val rateMoreRecentlyProductsDF = sparkSession.sql(
      """
        |select productId,yearMonth,count(1) as count from
        |(select productId,score,changeDate(timestamp) as yearMonth
        |from ratings) a
        |group by yearMonth,productId order by yearMonth desc,count desc
      """.stripMargin)
    storeDF2Mongo(rateMoreRecentlyProductsDF, RATE_MORE_RECENTLY_PRODUCTS)

    // 3. High-quality products: ranked by average rating score
    val averageProductsDF = sparkSession.sql(
      """
        |select productId,avg(score) as avg_score
        |from ratings
        |group by productId order by avg_score desc
      """.stripMargin)
    storeDF2Mongo(averageProductsDF, AVERAGE_PRODUCTS)

    // Shut down the SparkSession
    sparkSession.close()

  }

  /**
    * Writes a DataFrame into a MongoDB collection, replacing any existing data.
    *
    * @param df              the DataFrame to persist
    * @param collection_name target MongoDB collection name
    * @param mongoConfig     implicit MongoDB connection configuration
    */
  def storeDF2Mongo(df: DataFrame, collection_name: String)(implicit mongoConfig: MongoConfig): Unit = {
    df.write
      .option("uri", mongoConfig.mongoUri)
      .option("collection", collection_name)
      .format("com.mongodb.spark.sql")
      .mode("overwrite")
      .save()
  }
}

/**
  * MongoDB connection configuration.
  *
  * @param mongoUri MongoDB connection URI (e.g. mongodb://host:27017/db)
  * @param mongodb  name of the MongoDB database to use
  */
case class MongoConfig(mongoUri: String, mongodb: String)

/**
  * A single product rating given by a user.
  *
  * Example row: userId=4867, productId=457976, score=5.0, timestamp=1395676800
  *
  * @param userId    id of the rating user
  * @param productId id of the rated product
  * @param score     rating value (example data uses a 0-5 scale — confirm)
  * @param timestamp rating time in epoch seconds (consumers multiply by 1000
  *                  to get milliseconds)
  */
case class Rating(userId: Int, productId: Int, score: Double, timestamp: Long)