package service
import bean.Orders
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.desc
import org.apache.spark.streaming.dstream.DStream
import util.{JBDCUtil, SparkUtil}

/**
 * Real-time analytics over the incoming orders stream.
 *
 * Each requirement is implemented as a method that consumes the shared
 * `DStream[Orders]`; results are persisted to MySQL via [[util.JBDCUtil]].
 *
 * @author Yan Tong xue
 * @since 2024/3/13
 */
class RealTimeAnalyse {
  // Shared SparkSession and its implicits (needed for rdd.toDF()).
  val spark = SparkUtil.takeSpark()
  import spark.implicits._

  // JDBC settings loaded once per instance.
  val dbProperties = JBDCUtil.getDbProperties()
  val url = dbProperties.getProperty("url")
  // NOTE(review): despite the name, this holds the "user" property, not a
  // java.util.Properties. Kept public and unrenamed for backward compatibility.
  val Properties = dbProperties.getProperty("user")

  /**
   * Entry point: wires every analysis requirement onto the orders stream.
   *
   * @param orders stream of parsed order records
   */
  def dataAnalysis(orders: DStream[Orders]): Unit = {
    // Requirement 1: most popular restaurants
    PopularRestaurantsTop10(orders)
  }

  /**
   * Requirement 1: per micro-batch, find the store(s) with the most orders
   * and persist them into the `hot_restaurant` table.
   *
   * NOTE(review): the original code wrote only the single busiest store even
   * though the method is named "Top10". The new `topN` parameter defaults to
   * 1 to preserve that behavior; call with `topN = 10` for a real top-10.
   *
   * @param orders stream of parsed order records
   * @param topN   number of top stores to persist per batch (default 1)
   */
  def PopularRestaurantsTop10(orders: DStream[Orders], topN: Int = 1): Unit = {
    // foreachRDD is an output action returning Unit — do not bind its result.
    orders.foreachRDD { rdd =>
      if (!rdd.isEmpty()) {
        // The RDD already contains Orders rows; convert directly — the
        // original's element-by-element re-construction of Orders was a no-op.
        val ordersDF = rdd.toDF()

        // Order count per store, busiest first.
        val storeCountsDF = ordersDF.groupBy("store_id").count().orderBy(desc("count"))

        // collect() is safe here: topN rows at most reach the driver.
        val topStores = storeCountsDF.limit(topN).collect()
        if (topStores.nonEmpty) {
          val connection = JBDCUtil.getConnection
          // Close the connection even when a write fails — the original
          // leaked it on any exception from executeUpdate.
          try {
            topStores.foreach { row =>
              val storeId = row.getAs[String]("store_id")
              val count = row.getAs[Long]("count")
              // WARNING(review): string-interpolated SQL is open to injection
              // if store_id ever contains a quote; migrate JBDCUtil to a
              // PreparedStatement-based API when possible.
              val sql = s"INSERT INTO hot_restaurant VALUES('$storeId', $count)"
              JBDCUtil.executeUpdate(connection, sql)
            }
          } finally {
            connection.close()
          }
        }
      }
    }
  }
}


