package com.lpssfxy.online

import com.lpssfxy.online.entities.MongoConfig
import com.lpssfxy.online.model.StreamComputeProcessor
import com.lpssfxy.online.utils.AppUtils.{MONGO_DB, MONGO_URI, createKafkaStream, createStreamingContextEnv}
import org.apache.log4j.Logger

import scala.util.control.NonFatal

object RealtimeRecommenderBackup {
  private val logger = Logger.getLogger(getClass)

  /**
   * Entry point for the real-time recommender streaming job.
   *
   * Expects one CLI argument: the Spark master (e.g. "local[*]" or "yarn").
   * Builds the Spark / SQL / Streaming contexts, broadcasts the
   * product-similarity matrix, wires the Kafka rating stream into the
   * stream processor, then blocks on awaitTermination until the job is
   * stopped or fails. All contexts are shut down in the finally block.
   */
  def main(args: Array[String]): Unit = {
    // Held as vars so the finally block can clean up whatever was created
    // before a failure; null means "not yet initialised".
    var sc: org.apache.spark.SparkContext = null
    var spark: org.apache.spark.sql.SparkSession = null
    var ssc: org.apache.spark.streaming.StreamingContext = null
    try {
      if (args.length < 1) {
        println("请传入 master 参数，例如：local[*] 或 yarn 等")
        System.exit(1)
      }
      // 从命令行参数中获取 master 的值
      val master = args(0)

      // 1. Prepare the Spark / SQL / Streaming environment.
      logger.info("[CUSTOM] Starting to prepare the environment.")
      val (newSc, newSpark, newSsc) = createStreamingContextEnv("RealtimeRecommender", master)
      sc = newSc
      spark = newSpark
      ssc = newSsc

      // 2. Broadcast the product-similarity matrix so every executor can
      //    look up similar products without re-reading MongoDB.
      //    Implicit vals should always carry an explicit type annotation.
      logger.info("[CUSTOM] Creating broadcast variable for product similarity matrix.")
      implicit val mongoConfig: MongoConfig = MongoConfig(MONGO_URI, MONGO_DB)
      val simProductsMatrixBC = StreamComputeProcessor.createProductRecsBroadcast(spark, sc)

      // 3. Create the Kafka DStream of incoming ratings.
      logger.info("[CUSTOM] Creating Kafka DStream.")
      val ratingStream = createKafkaStream(ssc)

      // 4. Attach the rating-processing pipeline to the stream.
      logger.info("[CUSTOM] Running the rating stream processing.")
      StreamComputeProcessor.runRatingProcessor(ratingStream, simProductsMatrixBC)

      // 5. Start Spark Streaming.
      logger.info("[CUSTOM] Before starting Spark Streaming.")
      ssc.start()
      logger.info("[CUSTOM] After starting Spark Streaming.")
      println(">>>>>> spark streaming started <<<<<<<<<")

      // 6. Block until the streaming job terminates (or fails).
      ssc.awaitTermination()
    } catch {
      // NonFatal keeps truly fatal throwables (OutOfMemoryError,
      // InterruptedException, ...) propagating instead of being logged
      // and swallowed here.
      case NonFatal(e) =>
        logger.error("[CUSTOM] An error occurred during the real-time recommendation process.", e)
    } finally {
      if (ssc != null) {
        try {
          // stopSparkContext = true also shuts down the underlying SparkContext;
          // stopGracefully = true lets in-flight batches finish first.
          ssc.stop(stopSparkContext = true, stopGracefully = true)
          logger.info("[CUSTOM] Spark Streaming context stopped gracefully.")
        } catch {
          case NonFatal(e) =>
            logger.error("[CUSTOM] Failed to stop Spark Streaming context gracefully.", e)
        }
      }
      // Safety net for the case where ssc was never created, or its stop
      // failed before reaching the SparkContext.
      if (sc != null && !sc.isStopped) {
        sc.stop()
        logger.info("[CUSTOM] SparkContext stopped.")
      }
      if (spark != null) {
        spark.stop() // idempotent: a no-op if the context is already stopped
        logger.info("[CUSTOM] SparkSession stopped.")
      }
    }
  }
}