package com.chinasoft.shop

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

/**
 * Batch job: computes a weighted average rating per restaurant from the
 * `dazhong_dianping` review table, persists all averages to the
 * `dazhong_average` table (overwrite), and prints the top-20 restaurants.
 *
 * Weighting: overall rating 40%, environment 20%, flavor 20%, service 20%.
 */
object Top20Rest {
  def main(args: Array[String]): Unit = {
    // Initialize the SparkSession (local mode, all cores)
    val spark = SparkSession.builder()
      .appName("Top20Rest")
      .master("local[*]")
      .getOrCreate()

    // Ensure the session is stopped even if any stage below throws,
    // so executors and the UI port are not leaked.
    try {
      // JDBC configuration. Credentials may be overridden via environment
      // variables (DB_USER / DB_PASSWORD) so they need not live in source;
      // the original values remain the defaults for backward compatibility.
      val url = "jdbc:mysql://localhost:3306/dazhong?useSSL=false&characterEncoding=utf8&serverTimezone=UTC"
      val props = new java.util.Properties()
      props.setProperty("user", sys.env.getOrElse("DB_USER", "root"))
      props.setProperty("password", sys.env.getOrElse("DB_PASSWORD", "Etestnmm4l!"))
      props.setProperty("driver", "com.mysql.cj.jdbc.Driver")

      // Read the review table
      val reviewDF = spark.read
        .jdbc(url, "dazhong_dianping", props)

      // Per-review weighted score: 0.4/0.2/0.2/0.2 split across the
      // overall, environment, flavor, and service ratings.
      val weightedDF = reviewDF.withColumn(
        "weighted_rating",
        col("rating") * 0.4 +
          col("rating_env") * 0.2 +
          col("rating_flavor") * 0.2 +
          col("rating_service") * 0.2
      )

      // Average weighted score per restaurant
      val avgDF = weightedDF.groupBy("restId")
        .agg(avg("weighted_rating").alias("average_rating"))

      // avgDF feeds two actions (the JDBC write and the show below);
      // cache it so the read + aggregation is not recomputed twice.
      avgDF.cache()

      // Write all restaurant averages to dazhong_average (overwrite)
      avgDF.write
        .mode("overwrite")
        .jdbc(url, "dazhong_average", props)

      // Print the 20 restaurants with the highest average rating
      println("===== Top 20 Restaurants by Average Rating =====")
      avgDF.orderBy(desc("average_rating"))
        .limit(20)
        .show(20, truncate = false)
    } finally {
      spark.stop()
    }
  }
}
