package SparkStreaming

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import java.util.Properties
import scala.collection.mutable

object AttendanceAndAbsence {

  /** Builds a Kafka producer with String key/value serializers.
   *
   * @param brokerList comma-separated Kafka bootstrap servers, e.g. "host:9092"
   * @return a configured producer; the caller is responsible for closing it
   */
  def createKafkaProducer(brokerList: String): KafkaProducer[String, String] = {
    val props = new Properties()
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
    new KafkaProducer[String, String](props)
  }

  /** Consumes tab-separated attendance records from Kafka, keeps running
   * per-class attendance/absence totals, and publishes the cumulative
   * totals back to Kafka after every micro-batch.
   */
  def main(args: Array[String]): Unit = {
    // 1. Configure Spark Streaming with a 2-second micro-batch interval.
    val conf = new SparkConf().setMaster("local[*]").setAppName("AttendanceAndAbsence")
    val ssc = new StreamingContext(conf, Seconds(2))
    ssc.sparkContext.setLogLevel("ERROR")

    // 2. Kafka consumer configuration.
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "192.168.23.128:9092", // replace with your Kafka broker address
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "attendance-absence-consumer-group",
      "auto.offset.reset" -> "latest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )
    val topics = Array("attendance123") // input topic

    // 3. Create the direct Kafka input stream.
    val kafkaStream = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topics, kafkaParams)
    )

    // 4. Parse each record into (classId, status).
    //    Rows with fewer than 5 tab-separated fields are dropped instead of
    //    crashing the whole batch with ArrayIndexOutOfBoundsException.
    val parsedRecords = kafkaStream
      .map(_.value())
      .map(_.split("\t"))
      .filter(_.length >= 5)
      .map(parts => (parts(0), parts(4))) // field 0 = classId, field 4 = status

    // Running totals per class: classId -> (attendance, absence).
    // foreachRDD closures execute on the driver, so this driver-local map is
    // safe to mutate here (no executor-side access).
    val classAttendanceMap = mutable.Map[String, (Int, Int)]()

    // Create the producer ONCE on the driver and reuse it across batches.
    // The original created and closed a new producer on every 2-second batch,
    // churning TCP connections needlessly. Closed via shutdown hook on exit.
    val kafkaProducer = createKafkaProducer("192.168.23.128:9092") // replace with your Kafka broker address
    sys.addShutdownHook(kafkaProducer.close())

    // Accumulate per-class counts for each batch, merge into the running
    // totals, then print and publish the cumulative figures.
    parsedRecords.foreachRDD { rdd =>
      // Aggregate on the executors with reduceByKey and only collect the
      // small per-class totals, instead of collecting every raw record to
      // the driver and grouping there.
      val batchStats = rdd
        .map { case (classId, status) =>
          // status "A" counts as attendance, "L" as absence; anything else
          // contributes (0, 0) and is effectively ignored.
          (classId, (if (status == "A") 1 else 0, if (status == "L") 1 else 0))
        }
        .reduceByKey { case ((a1, l1), (a2, l2)) => (a1 + a2, l1 + l2) }
        .collect()

      // Merge this batch's counts into the global running totals.
      batchStats.foreach { case (classId, (attendance, absence)) =>
        val (curAttendance, curAbsence) = classAttendanceMap.getOrElse(classId, (0, 0))
        classAttendanceMap(classId) = (curAttendance + attendance, curAbsence + absence)
      }

      // Print and publish the cumulative totals for every class seen so far.
      classAttendanceMap.foreach { case (classId, (totalAttendance, totalAbsence)) =>
        val result = s"班级: $classId, 总出勤数: $totalAttendance, 总缺勤数: $totalAbsence"
        println(result)
        kafkaProducer.send(new ProducerRecord[String, String]("AttendanceAndAbsence", null, result))
      }
    }

    // 5. Start the streaming job and block until termination.
    ssc.start()
    ssc.awaitTermination()
  }
}
