package com.lpssfxy.statistics.utils

import com.lpssfxy.statistics.entities.Rating
import org.apache.spark.sql.{DataFrame, SparkSession}

import java.text.SimpleDateFormat
import java.time.format.DateTimeFormatter
import java.time.{Instant, ZoneId}
import java.util.Date

object AppUtils {

  // MongoDB collection names used by the statistics jobs.
  val MONGODB_RATING_COLLECTION = "rating"
  val RATE_MORE_PRODUCTS = "rateMoreProducts"
  val RATE_MORE_RECENTLY_PRODUCTS = "rateMoreRecentlyProducts"
  val AVERAGE_PRODUCTS = "averageProducts"
  // Name of the temporary view the rating DataFrame is expected to be registered under.
  val DATAFRAME_TEMP_VIEW = "ratings"

  // SECURITY NOTE(review): MongoDB credentials are hard-coded in source and in the
  // connection URI — consider loading them from the environment or external config.
  val configMap = Map(
    "spark.cores" -> "local[*]",
    "mongo.uri" -> "mongodb://fooadmin:123456@s3:27017/bigdata",
    "mongo.db" -> "bigdata"
  )

  /** Default Spark master from the static config (see [[getMaster]] for the CLI override). */
  def getSparkCores: String = configMap("spark.cores")

  /** MongoDB connection URI (includes host, database and credentials). */
  def getMongoUri: String = configMap("mongo.uri")

  /** MongoDB database name. */
  def getMongoDb: String = configMap("mongo.db")

  /**
   * Resolves the Spark master URL from the command-line arguments.
   *
   * Exits the JVM with status 1 when no argument was supplied, so callers can
   * rely on a non-empty result.
   *
   * @param args program arguments; `args(0)` must be a master URL such as `local[*]` or `yarn`
   * @return the master URL supplied on the command line
   */
  def getMaster(args: Array[String]): String = {
    if (args.isEmpty) {
      // Usage errors belong on stderr, not stdout.
      System.err.println("请传入 master 参数，例如：local[*] 或 yarn 等")
      // sys.exit returns Nothing, so this branch type-checks and args(0) below
      // is only ever evaluated when an argument is present.
      sys.exit(1)
    }
    args(0)
  }

  /**
   * Loads the rating collection from MongoDB as a DataFrame.
   *
   * The intermediate `.as[Rating]` forces the loaded rows to conform to the
   * [[Rating]] schema before converting back to an untyped DataFrame.
   *
   * @param spark active session used for reading
   * @return DataFrame holding the rating rows
   */
  def readRatingAsDF(spark: SparkSession): DataFrame = {
    // Needed for the Dataset encoder used by .as[Rating].
    import spark.implicits._
    spark
      .read
      .option("uri", getMongoUri)
      .option("collection", MONGODB_RATING_COLLECTION)
      .format("com.mongodb.spark.sql")
      .load()
      .as[Rating]
      .toDF()
  }

  /**
   * Counts how many ratings each product received and saves the result to the
   * `rateMoreProducts` collection.
   *
   * NOTE(review): assumes the caller has already registered the rating
   * DataFrame as the temp view [[DATAFRAME_TEMP_VIEW]] ("ratings") — confirm
   * against the driver program.
   *
   * @param spark active session whose catalog contains the "ratings" view
   */
  def countRateMoreProducts(spark: SparkSession): Unit = {
    val rateMoreProductsDF = spark.sql(
      s"select productId, count(productId) as count from $DATAFRAME_TEMP_VIEW group by productId")
    saveToMongo(rateMoreProductsDF, RATE_MORE_PRODUCTS)
  }

  /**
   * Counts ratings per product per calendar month and saves the result to the
   * `rateMoreRecentlyProducts` collection, newest months first.
   *
   * NOTE(review): assumes the caller has already registered the rating
   * DataFrame as the temp view [[DATAFRAME_TEMP_VIEW]] ("ratings") — confirm
   * against the driver program.
   *
   * @param spark active session whose catalog contains the "ratings" view
   */
  def countProductScoreByMonth(spark: SparkSession): Unit = {
    // DateTimeFormatter is immutable and thread-safe, unlike the SimpleDateFormat
    // instance that was previously shared by the UDF across tasks.
    val monthFormat = DateTimeFormatter.ofPattern("yyyyMM")

    // UDF turning an epoch-second timestamp into a yyyyMM integer,
    // e.g. 1260759144 => 200912 (rendered in the JVM's default time zone).
    spark.udf.register("changeDate", (x: Int) =>
      monthFormat.format(Instant.ofEpochSecond(x.toLong).atZone(ZoneId.systemDefault())).toInt)

    // Re-shape the ratings with a year-month column and expose it as a view
    // for the aggregation query below.
    val ratingOfYearMonth = spark.sql(
      s"select productId, score, changeDate(timestamp) as yearmonth from $DATAFRAME_TEMP_VIEW")
    ratingOfYearMonth.createOrReplaceTempView("ratingOfMonth")

    // Per-month rating counts per product, most recent month (and highest count) first.
    val rateMoreRecentlyProducts = spark.sql("select productId, count(productId) as count ,yearmonth from ratingOfMonth group by yearmonth,productId order by yearmonth desc, count desc")

    saveToMongo(rateMoreRecentlyProducts, RATE_MORE_RECENTLY_PRODUCTS)
  }

  /**
   * Computes the average score of every product and saves the result to the
   * `averageProducts` collection.
   *
   * NOTE(review): assumes the caller has already registered the rating
   * DataFrame as the temp view [[DATAFRAME_TEMP_VIEW]] ("ratings") — confirm
   * against the driver program.
   *
   * @param spark active session whose catalog contains the "ratings" view
   */
  def countAverageProductScore(spark: SparkSession): Unit = {
    val averageProductsDF = spark.sql(
      s"select productId, avg(score) as avg from $DATAFRAME_TEMP_VIEW group by productId ")
    saveToMongo(averageProductsDF, AVERAGE_PRODUCTS)
  }

  /**
   * Writes a DataFrame to the given MongoDB collection, replacing any previous
   * contents (overwrite mode). Shared by all statistics jobs above.
   *
   * @param df         result set to persist
   * @param collection target MongoDB collection name
   */
  private def saveToMongo(df: DataFrame, collection: String): Unit = {
    df.write
      .option("uri", getMongoUri)
      .option("collection", collection)
      .mode("overwrite")
      .format("com.mongodb.spark.sql")
      .save()
  }
}
