package org.huel.dataprocessing

import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}
import org.json.JSONObject

import java.time.Duration
import java.util.concurrent.{Executors, TimeUnit}
import java.util.{Arrays, Properties}
import scala.collection.mutable
import scala.jdk.CollectionConverters.IterableHasAsScala
import scala.util.control.NonFatal

/**
 * Consumes tab-separated attendance records from the Kafka topic "attendance",
 * aggregates running per-class, per-student counts of the "L" and "A" statuses
 * with Spark, and publishes the totals as JSON to the Kafka topic "LAndAByName1".
 */
object runRDD2 {
  // One attendance record. `status` is the 5th tab-separated field of the raw
  // message; the values this job counts are "L" and "A" (anything else is ignored).
  case class Attendance(classId: String, studentName: String, status: String)

  def main(args: Array[String]): Unit = {
    // Local Spark context used to aggregate each polled micro-batch.
    val conf = new SparkConf().setMaster("local[*]").setAppName("StudentsAttendance")
    val sc = new SparkContext(conf)
    sc.setLogLevel("ERROR")

    // Kafka consumer configuration.
    val consumerProps = new Properties()
    consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "likai:9092") // replace with your Kafka broker address
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer")
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer")
    consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "students-attendance-consumer-group")
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest")

    val kafkaConsumer = new KafkaConsumer[String, String](consumerProps)
    kafkaConsumer.subscribe(Arrays.asList("attendance"))

    // Kafka producer configuration.
    val producerProps = new Properties()
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "likai:9092")
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    val kafkaProducer = new KafkaProducer[String, String](producerProps)

    // Single-threaded scheduler driving the poll/aggregate/publish loop.
    val scheduler = Executors.newScheduledThreadPool(1)

    // Running totals per (classId, studentName): (count of "L", count of "A").
    val attendanceCounts = mutable.Map.empty[(String, String), (Int, Int)]

    val task = new Runnable {
      def run(): Unit = try {
        // poll(Duration) replaces the deprecated poll(long) overload.
        val records = kafkaConsumer.poll(Duration.ofMillis(1000)).asScala
        val data = records
          .map(_.value().split("\t"))
          // Skip malformed lines instead of throwing on arr(4); an exception
          // here would otherwise abort the run (and, before the NonFatal guard
          // below, every future scheduled run).
          .collect { case arr if arr.length >= 5 => Attendance(arr(0), arr(1), arr(4)) }
          .toList

        if (data.nonEmpty) {
          val newRDD = sc.parallelize(data)

          // Per-(class, student) counts for this batch.
          // BUG FIX: status "L" must feed the first slot (published under JSON
          // key "L") and "A" the second (JSON key "A"); the mapping was
          // previously swapped, so each JSON key reported the other count.
          val classStudentCounts: RDD[((String, String), (Int, Int))] = newRDD
            .map { attendance =>
              val delta = attendance.status match {
                case "L" => (1, 0)
                case "A" => (0, 1)
                case _   => (0, 0) // unknown status: counted in neither bucket
              }
              ((attendance.classId, attendance.studentName), delta)
            }
            .reduceByKey { (a, b) => (a._1 + b._1, a._2 + b._2) }

          // Merge this batch into the running totals on the driver.
          classStudentCounts.collect().foreach { case (key, (lDelta, aDelta)) =>
            val (curL, curA) = attendanceCounts.getOrElse(key, (0, 0))
            attendanceCounts(key) = (curL + lDelta, curA + aDelta)
          }

          // Publish the full running totals as JSON to stdout and Kafka.
          attendanceCounts.foreach { case ((classId, studentName), (lCount, aCount)) =>
            val jsonData = new JSONObject()
            jsonData.put("classId", classId)
            jsonData.put("studentName", studentName)
            jsonData.put("L", lCount)
            jsonData.put("A", aCount)

            println(jsonData.toString)

            val record = new ProducerRecord[String, String]("LAndAByName1", jsonData.toString)
            kafkaProducer.send(record)
          }
        }
      } catch {
        // scheduleAtFixedRate silently cancels all subsequent executions if the
        // task throws; log and keep the schedule alive instead.
        case NonFatal(e) => e.printStackTrace()
      }
    }

    // Poll and aggregate every 5 seconds, starting immediately.
    scheduler.scheduleAtFixedRate(task, 0, 5, TimeUnit.SECONDS)

    // Release all resources when the JVM shuts down.
    sys.ShutdownHookThread {
      kafkaConsumer.close()
      kafkaProducer.close()
      scheduler.shutdown()
      sc.stop()
    }
  }
}