package com.oscar.statistics

import java.text.SimpleDateFormat
import java.util.Date

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

// Case class definitions
// A single user's rating of a book. `timestamp` is epoch seconds (fits an Int until 2038).
case class Rating(userId: Int, bookId: Int, score: Double, timestamp: Int)
case class MySQLConfig( url: String,user: String,password: String )

object StatisticsRecommender {
  // Names of the MySQL tables this job reads from / writes to
  // (the original comments said MongoDB — the code has always used MySQL via JDBC).
  val MYSQL_RATING = "Rating"
  val RATE_MORE_BOOKS = "RateMoreBooks"
  val RATE_MORE_RECENTLY_BOOKS = "RateMoreRecentlyBooks"
  val AVERAGE_BOOKS_SCORE = "AverageBooksScore"


  /**
   * Entry point: loads the rating table from MySQL, computes three statistics
   * (all-time rating counts, monthly rating counts, per-book average score)
   * and overwrites one MySQL table per result.
   */
  def main(args: Array[String]): Unit = {
    // Runtime configuration.
    // NOTE(review): credentials are hard-coded; move them to external
    // configuration / environment variables before production use.
    val config = Map(
      "spark.cores" -> "local[*]",
      "mysql.url" -> "jdbc:mysql://localhost:3306/recommender",
      "mysql.user" -> "root",
      "mysql.password" -> "123456"
    )
    // Build the Spark session.
    val sparkConf = new SparkConf().setMaster(config("spark.cores")).setAppName("StatisticsRecommender")
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()
    import spark.implicits._

    // Implicit connection config picked up by storeDataInMySQL.
    // Map.apply instead of .get(...).get: same failure mode on a missing key,
    // without the .get-on-Option anti-pattern.
    implicit val mysqlConfig: MySQLConfig =
      MySQLConfig(config("mysql.url"), config("mysql.user"), config("mysql.password"))


    // Load the raw ratings via JDBC and expose them to Spark SQL as "ratings".
    val ratingDF: DataFrame = spark.read
      .format("jdbc")
      .option("url", mysqlConfig.url)
      .option("dbtable", MYSQL_RATING)
      .option("user", mysqlConfig.user)
      .option("password", mysqlConfig.password)
      .load()
    ratingDF.createOrReplaceTempView("ratings")


    // Different statistics-based recommendation results:

    // 1. All-time popular books: rating count per bookId, most rated first.
    val rateMoreBooksDF = spark.sql("select bookId, count(bookId) as count from ratings group by bookId order by count desc")
    rateMoreBooksDF.show(truncate = false)
    storeDataInMySQL( rateMoreBooksDF, RATE_MORE_BOOKS )

    // 2. Recently popular books: count ratings per (yearmonth, bookId),
    //    converting the epoch-second timestamp to yyyyMM
    //    (e.g. 1376496000 ==> 201308).
    // Register the UDF. A SimpleDateFormat is created per invocation because the
    // class is NOT thread-safe and Spark may evaluate this closure concurrently.
    spark.udf.register("changeDate", (x: Int) => new SimpleDateFormat("yyyyMM").format(new Date(x * 1000L)).toInt)
    // Reshape the ratings to (bookId, score, yearmonth) before aggregating.
    val ratingOfYearMonthDF = spark.sql("select bookId, score, changeDate(timestamp) as yearmonth from ratings")
    ratingOfYearMonthDF.createOrReplaceTempView("ratingOfMonth")
    val rateMoreRecentlyBooksDF = spark.sql("select bookId, count(bookId) as count, yearmonth from ratingOfMonth group by yearmonth, bookId order by yearmonth desc, count desc")
    rateMoreRecentlyBooksDF.show(truncate = false)
    // Persist the monthly counts to MySQL.
    storeDataInMySQL( rateMoreRecentlyBooksDF, RATE_MORE_RECENTLY_BOOKS )

    // 3. Highly-rated books: average score per bookId, best first.
    val averageBooksScoreDF = spark.sql("select bookId, avg(score) as avg from ratings group by bookId order by avg desc")
    averageBooksScoreDF.show(truncate = false)
    storeDataInMySQL( averageBooksScoreDF, AVERAGE_BOOKS_SCORE )


    // Shut down Spark.
    spark.stop()
  }

  /**
   * Overwrites the given MySQL table with the contents of the DataFrame.
   *
   * @param rateMoreBooksDF the DataFrame to persist
   * @param form_name       the target table name
   * @param mysqlConfig     implicit JDBC connection settings
   */
  def storeDataInMySQL(rateMoreBooksDF: DataFrame, form_name: String)(implicit mysqlConfig: MySQLConfig): Unit = {
    rateMoreBooksDF.write
      .mode("overwrite")
      .format("jdbc")
      .option("url", mysqlConfig.url)
      .option("dbtable", form_name)
      .option("user", mysqlConfig.user)
      .option("password", mysqlConfig.password)
      .save()
  }

}
