package com.spark.analysis

import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.control.NonFatal

/**
 * Spark Streaming job: consumes order JSON messages from the Kafka topic
 * `order_info`, aggregates per-product sales per micro-batch, and accumulates
 * the results into Redis (a hash of per-product totals plus a global total).
 */
object StreamingProcessData {

  // Redis hash key: accumulated sales amount per product (field = productId).
  // NOTE(review): "bussiness" is a typo for "business", but the key is kept
  // byte-identical because existing Redis data may already be stored under it.
  val orderTotalKey = "bussiness::order::total"
  // Redis string key: accumulated sales amount across all products.
  val totalKey = "bussiness::order::all"
  // Redis database index selected before each partition's writes.
  val dbIndex = 0

  def main(args: Array[String]): Unit = {
    // 1. Build the SparkConf. local[2] gives one core for the receiver-less
    //    direct stream plus one for processing when run locally.
    val sparkConf: SparkConf = new SparkConf()
      .setAppName("KafkaStreamingTest")
      .setMaster("local[2]")
    // 2. Create the SparkContext.
    val sc = new SparkContext(sparkConf)
    // 3. Create the StreamingContext with a 3-second batch interval.
    val ssc = new StreamingContext(sc, Seconds(3))
    // 4. Checkpoint directory: Kafka offsets (and DAG metadata) are persisted
    //    here so the job can recover after a restart.
    ssc.checkpoint("./spark-receiver")
    // 5. Kafka consumer parameters.
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "s1:9092,s2:9092,s3:9092", // 1. 修改Windows上hosts文件；2. s1-s3对应IP地址做映射
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "spark-receiver",
      "auto.offset.reset" -> "latest",
      // Must be false: offsets are managed by Spark's checkpoint. With
      // auto-commit enabled, Kafka could commit offsets for a batch that
      // subsequently fails, silently dropping those records on restart
      // (see the Spark + Kafka 0.10 integration guide).
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )
    // Topics to subscribe to.
    val topics = Array("order_info")
    // Create the direct (receiver-less) Kafka input stream.
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)
    )
    // Parse each record's value into a fastjson JSONObject.
    val events: DStream[JSONObject] = stream.map(record => {
      val json = JSON.parseObject(record.value)
      println(s"解析后的 JSON: ${json.toJSONString}")
      println(s"productId: ${json.getString("productId")}, productPrice: ${json.getLong("productPrice")}")
      json
    })
    // Per batch: group by productId and compute (count, totalPrice).
    val orders: DStream[(String, Int, Long)] = events
      // Drop records with a missing/empty productId BEFORE building tuples,
      // so invalid records allocate nothing downstream.
      .filter { json =>
        val productId = json.getString("productId")
        productId != null && productId.nonEmpty
      }
      // (productId, (1L, price)): 1 for counting, price for summing.
      .map(json => (json.getString("productId"), (1L, json.getLong("productPrice"))))
      // Aggregate per productId: add counts and prices pairwise.
      .reduceByKey { case ((count1, price1), (count2, price2)) =>
        (count1 + count2, price1 + price2)
      }
      // Flatten to (productId, count, totalPrice).
      .map { case (productId, (count, totalPrice)) =>
        (productId, count.toInt, totalPrice)
      }
    // Write each batch's aggregates to Redis, one connection per partition.
    orders.foreachRDD(rdd => {
      rdd.foreachPartition(partition => {
        val jedis = RedisClient.pool.getResource() // borrow a pooled connection
        try {
          jedis.select(dbIndex)
          partition.foreach { case (productId, count, totalPrice) =>
            println(s"准备写入 Redis - productId: $productId, totalPrice: $totalPrice")
            // Defensive re-check; upstream filter should already guarantee this.
            if (productId != null && productId.nonEmpty) {
              jedis.hincrBy(orderTotalKey, productId, totalPrice) // per-product accumulation
            } else {
              println("警告：productId 为空，跳过哈希表写入")
            }
            jedis.incrBy(totalKey, totalPrice) // global accumulation
          }
        } catch {
          // NonFatal only: let OOM/InterruptedException etc. propagate instead
          // of being swallowed here.
          case NonFatal(e) =>
            println(s"Redis 写入失败: ${e.getMessage}")
        } finally {
          jedis.close() // Jedis 3.x: close() returns the connection to the pool
        }
      })
    })

    // Start the streaming job and block until termination.
    ssc.start()
    ssc.awaitTermination()
  }
}