package main.scala.org.huel.dataprocessing

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.sql.{Row, SaveMode, SparkSession}


import java.sql.{Connection, DriverManager, PreparedStatement, Timestamp}
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
import java.util.Properties // JDBC connection settings (user/password/driver)

/**
 * @author Chenghaoyang.
 * @Description
 * Streams attendance records from Kafka and maintains in MySQL:
 * present/absent counts for all students (attendance),
 * per class (class_attendance),
 * per course (course_attendance),
 * and absence probabilities (absence_prediction).
 * @date 2025/06/13
 */
object AttendanceStatistics {
  /**
   * Entry point. Consumes tab-separated attendance records from the
   * "attendance" Kafka topic in 5-second micro-batches, computes overall /
   * per-class / per-course attendance statistics and absence probabilities
   * with Spark SQL, persists everything to MySQL, and only then commits the
   * Kafka offsets (at-least-once delivery).
   *
   * Expected record layout (6 tab-separated fields):
   *   class_id \t student_name \t course_name \t student_id \t score \t is_absent
   */
  def main(args: Array[String]): Unit = {
    // Local master is for development; remove setMaster when submitting to a cluster.
    val sparkConf = new SparkConf()
      .setAppName("AttendanceStatistics")
      .setMaster("local[*]")
      .set("spark.streaming.stopGracefullyOnShutdown", "true")

    // SparkSession for DataFrame / SQL work on top of the streaming context.
    val spark = SparkSession.builder
      .config(sparkConf)
      .getOrCreate()

    import spark.implicits._

    spark.sparkContext.setLogLevel("WARN")

    // 5-second batch interval.
    val ssc = new StreamingContext(spark.sparkContext, Seconds(5))

    // Kafka consumer settings. Auto-commit is disabled on purpose: offsets are
    // committed manually below, only after a batch has been saved to MySQL.
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "43.143.125.94:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "attendance_statistics_group",
      "auto.offset.reset" -> "latest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val topics = Array("attendance")

    println(s"\n✓ 正在监听Kafka主题: ${topics.mkString(", ")}")
    println(s"✓ 使用Kafka服务器: 43.143.125.94:9092")
    println(s"✓ 消费者组ID: attendance_statistics_group")

    // Direct stream: one Kafka partition per RDD partition.
    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)
    )

    // JDBC target shared by every sink below.
    // NOTE(review): credentials are hard-coded in source; move them to
    // configuration / environment variables before production use.
    val dbUrl = "jdbc:mysql://43.143.125.94:3306/attendance_new?useUnicode=true&characterEncoding=utf-8&useSSL=false&serverTimezone=Asia/Shanghai"
    val dbProps = new Properties()
    dbProps.setProperty("user", "root")
    dbProps.setProperty("password", "Lwj378$$")
    dbProps.setProperty("driver", "com.mysql.cj.jdbc.Driver")

    stream.foreachRDD { rdd =>
      // Offset ranges of this batch; committed only after a successful save.
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

      if (!rdd.isEmpty()) {
        // count() launches a job, so it is only run for non-empty batches
        // (the original code counted every batch, including empty ones).
        println(s"收到RDD，包含 ${rdd.count()} 条记录")

        val currentTime = LocalDateTime.now()
        val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
        val formattedTime = currentTime.format(formatter)

        println(s"\n=========================================================")
        println(s"| 出勤统计报告 - ${formattedTime} |")
        println(s"=========================================================")

        // Parse the tab-separated payload into a typed DataFrame. Malformed
        // rows (wrong field count, or non-numeric score/is_absent) are logged,
        // marked with -1 sentinels, then filtered out.
        val attendanceDF = rdd.map(record => record.value())
          .map { line =>
            val fields = line.split("\t")
            if (fields.length == 6) {
              try {
                (
                  fields(0),       // class_id
                  fields(1),       // student_name
                  fields(2),       // course_name
                  fields(3),       // student_id
                  fields(4).toInt, // score
                  fields(5).toInt  // is_absent (0 = present, 1 = absent)
                )
              } catch {
                case e: NumberFormatException =>
                  println(s"Warning: Skipping malformed record due to non-integer score/is_absent: $line - ${e.getMessage}")
                  ("Unknown", "Unknown", "Unknown", "Unknown", -1, -1)
              }
            } else {
              println(s"Warning: Skipping malformed record due to incorrect field count: $line")
              ("Unknown", "Unknown", "Unknown", "Unknown", -1, -1)
            }
          }
          .filter(_._5 >= 0) // drop the -1 sentinel rows
          .toDF("class_id", "student_name", "course_name", "student_id", "score", "is_absent")

        // Register as a temp view for the SQL queries below.
        attendanceDF.createOrReplaceTempView("attendance_temp")

        // Overall present/absent counts. COALESCE guards against NULL sums
        // (SUM over zero rows is NULL) when every record in the batch was
        // malformed and filtered out — previously this NPE'd in getAs[Long].
        val overallStats = spark.sql("""
          SELECT
            COALESCE(SUM(CASE WHEN is_absent = 0 THEN 1 ELSE 0 END), 0) AS present_count,
            COALESCE(SUM(CASE WHEN is_absent = 1 THEN 1 ELSE 0 END), 0) AS absent_count,
            COUNT(*) AS total_count
          FROM attendance_temp
        """)

        println("\n===== 总体出勤情况统计 =====")
        overallStats.show()

        // Per-class counts — matches the class_attendance table layout.
        val classStats = spark.sql("""
          SELECT
            class_id,
            SUM(CASE WHEN is_absent = 0 THEN 1 ELSE 0 END) AS present_count,
            SUM(CASE WHEN is_absent = 1 THEN 1 ELSE 0 END) AS absent_count
          FROM attendance_temp
          GROUP BY class_id
          ORDER BY class_id
        """)

        println("\n===== 班级出勤情况统计 =====")
        classStats.show()

        // Per-course counts — matches the course_attendance table layout.
        val courseStats = spark.sql("""
          SELECT
            course_name,
            SUM(CASE WHEN is_absent = 0 THEN 1 ELSE 0 END) AS present_count,
            SUM(CASE WHEN is_absent = 1 THEN 1 ELSE 0 END) AS absent_count
          FROM attendance_temp
          GROUP BY course_name
          ORDER BY course_name
        """)

        println("\n===== 课程出勤情况统计 =====")
        courseStats.show()

        // Frequently absent students — feeds the absence_prediction table.
        // The explicit CAST keeps absence_probability a DOUBLE so the JDBC
        // writer can use getAs[Double] / setDouble.
        val absenteeStudents = spark.sql("""
          SELECT
            student_id,
            course_name,
            COUNT(*) AS record_count,
            SUM(is_absent) AS absent_count,
            CAST((SUM(is_absent) * 100.0 / COUNT(*)) AS DOUBLE) AS absence_probability
          FROM attendance_temp
          GROUP BY student_id, course_name
          HAVING SUM(is_absent) > 0
          ORDER BY absence_probability DESC
        """)

        println("\n===== 需要关注的缺勤学生 =====")
        absenteeStudents.show(10)

        // Student/course attendance-rate matrix.
        val userCourseMatrix = spark.sql("""
          SELECT
            student_id,
            course_name,
            CAST(((COUNT(*) - SUM(is_absent)) * 100.0 / COUNT(*)) AS DOUBLE) AS attendance_rate
          FROM attendance_temp
          GROUP BY student_id, course_name
        """)

        println("\n===== 用户-课程矩阵数据 =====")
        userCourseMatrix.show(10)

        // Persist everything. Offsets are committed only when this whole
        // section succeeds, so a failed batch is replayed after restart
        // (at-least-once); previously offsets were committed even on failure.
        var batchSaved = false
        try {
          // 1. Raw records -> attendance (plain insert).
          saveToMysql(attendanceDF,
            """
              |INSERT INTO attendance
              |(class_id, student_name, course_name, student_id, score, is_absent)
              |VALUES (?, ?, ?, ?, ?, ?)
            """.stripMargin,
            (st, row, _) => {
              st.setString(1, row.getAs[String]("class_id"))
              st.setString(2, row.getAs[String]("student_name"))
              st.setString(3, row.getAs[String]("course_name"))
              st.setString(4, row.getAs[String]("student_id"))
              st.setInt(5, row.getAs[Int]("score"))
              st.setInt(6, row.getAs[Int]("is_absent"))
            },
            dbUrl, dbProps, "保存原始数据")

          // 2. Overall totals -> attendance_summary. The result is a single
          // row, so collect() on the driver is fine here.
          val overallRow = overallStats.collect()(0)
          val totalPresent = overallRow.getAs[Long]("present_count")
          val totalAbsent = overallRow.getAs[Long]("absent_count")
          updateAttendanceSummary(totalPresent, totalAbsent, dbUrl, dbProps)

          // 3. Per-class counts -> class_attendance (accumulating upsert).
          saveToMysql(classStats,
            """
              |INSERT INTO class_attendance
              |(class_id, present_count, absent_count, last_updated)
              |VALUES (?, ?, ?, NOW())
              |ON DUPLICATE KEY UPDATE
              |present_count = present_count + VALUES(present_count),
              |absent_count = absent_count + VALUES(absent_count),
              |last_updated = NOW()
            """.stripMargin,
            (st, row, _) => {
              st.setString(1, row.getAs[String]("class_id"))
              st.setLong(2, row.getAs[Long]("present_count"))
              st.setLong(3, row.getAs[Long]("absent_count"))
            },
            dbUrl, dbProps, "更新班级出勤统计")

          // 4. Per-course counts -> course_attendance (accumulating upsert).
          saveToMysql(courseStats,
            """
              |INSERT INTO course_attendance
              |(course_name, present_count, absent_count, last_updated)
              |VALUES (?, ?, ?, NOW())
              |ON DUPLICATE KEY UPDATE
              |present_count = present_count + VALUES(present_count),
              |absent_count = absent_count + VALUES(absent_count),
              |last_updated = NOW()
            """.stripMargin,
            (st, row, _) => {
              st.setString(1, row.getAs[String]("course_name"))
              st.setLong(2, row.getAs[Long]("present_count"))
              st.setLong(3, row.getAs[Long]("absent_count"))
            },
            dbUrl, dbProps, "更新课程出勤统计")

          // 5. Absence probabilities -> absence_prediction.
          // rank_order restarts at 1 inside every partition; a global rank
          // would require collecting or re-partitioning to one partition first.
          saveToMysql(absenteeStudents,
            """
              |INSERT INTO absence_prediction
              |(student_id, course_name, probability, rank_order, last_updated)
              |VALUES (?, ?, ?, ?, NOW())
              |ON DUPLICATE KEY UPDATE
              |probability = VALUES(probability),
              |rank_order = VALUES(rank_order),
              |last_updated = NOW()
            """.stripMargin,
            (st, row, idx) => {
              st.setString(1, row.getAs[String]("student_id"))
              st.setString(2, row.getAs[String]("course_name"))
              st.setDouble(3, row.getAs[Double]("absence_probability"))
              st.setInt(4, idx + 1) // partition-local rank
            },
            dbUrl, dbProps, "更新缺勤预测数据")

          // 6. Attendance rates -> user_course_matrix.
          saveToMysql(userCourseMatrix,
            """
              |INSERT INTO user_course_matrix
              |(student_id, course_name, attendance_rate, last_updated)
              |VALUES (?, ?, ?, NOW())
              |ON DUPLICATE KEY UPDATE
              |attendance_rate = VALUES(attendance_rate),
              |last_updated = NOW()
            """.stripMargin,
            (st, row, _) => {
              st.setString(1, row.getAs[String]("student_id"))
              st.setString(2, row.getAs[String]("course_name"))
              st.setDouble(3, row.getAs[Double]("attendance_rate"))
            },
            dbUrl, dbProps, "更新用户-课程矩阵数据")

          batchSaved = true
          println("\n✓ 数据已成功保存到MySQL数据库")
        } catch {
          case e: Exception =>
            println(s"\n✗ 保存数据到MySQL时出错: ${e.getMessage}")
            e.printStackTrace()
          // Swallowed on the driver so the streaming app keeps running.
          // Task-level failures were already re-thrown inside saveToMysql
          // so Spark could retry them first.
        }

        if (batchSaved) {
          // Commit the Kafka offsets only after the batch was fully persisted.
          stream.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
          println(s"✓ Kafka offsets committed for batch processing ${offsetRanges.map(_.toString).mkString(", ")}")
        } else {
          println("✗ 保存失败，本批次offset未提交，重启后将重新处理该批次数据")
        }

      } else {
        println("收到空RDD，跳过处理和提交")
      }
    }

    // Start the streaming context and block until termination.
    ssc.start()
    println("\n✓ Spark Streaming统计程序已启动，正在监听Kafka数据...")
    ssc.awaitTermination()
    println("Spark Streaming context terminated.")
  }

  /**
   * Writes every row of `df` to MySQL using one JDBC connection and one
   * batched PreparedStatement per partition, inside a single transaction.
   *
   * This replaces five copy-pasted foreachPartition blocks; it mirrors the
   * existing pattern of calling object methods (createConnection) from
   * executor-side closures.
   *
   * @param df        rows to persist
   * @param insertSql parameterized INSERT (optionally with ON DUPLICATE KEY UPDATE)
   * @param bind      fills the statement parameters for one row; the third
   *                  argument is the row's 0-based index within its partition
   *                  (used for partition-local ranking)
   * @param label     Chinese message prefix used in error logs, kept identical
   *                  to the original per-section messages
   */
  private def saveToMysql(
      df: org.apache.spark.sql.DataFrame,
      insertSql: String,
      bind: (PreparedStatement, Row, Int) => Unit,
      dbUrl: String,
      dbProps: Properties,
      label: String): Unit = {
    df.foreachPartition { (partition: Iterator[Row]) =>
      if (partition.nonEmpty) {
        var connection: Connection = null
        var statement: PreparedStatement = null
        try {
          connection = createConnection(dbUrl, dbProps)
          connection.setAutoCommit(false) // one transaction per partition
          statement = connection.prepareStatement(insertSql)

          var rowIndex = 0
          partition.foreach { row =>
            bind(statement, row, rowIndex)
            statement.addBatch()
            rowIndex += 1
          }

          statement.executeBatch()
          connection.commit()
        } catch {
          case e: Exception =>
            println(s"${label}时出错: ${e.getMessage}")
            if (connection != null) connection.rollback()
            e.printStackTrace()
            throw e // fail the task so Spark can retry it
        } finally {
          if (statement != null) statement.close()
          if (connection != null) connection.close()
        }
      }
      () // foreachPartition expects Unit
    }
  }

  /**
   * Upserts the running overall attendance totals into `attendance_summary`.
   * The counts accumulate on the database side (existing totals + this batch).
   *
   * Runs on the driver; any failure is logged and swallowed so that one bad
   * write cannot terminate the streaming application.
   *
   * @param totalPresent number of present records in this batch
   * @param totalAbsent  number of absent records in this batch
   * @param dbUrl        JDBC URL of the target MySQL database
   * @param dbProps      JDBC properties (user, password, driver)
   */
  def updateAttendanceSummary(totalPresent: Long, totalAbsent: Long, dbUrl: String, dbProps: Properties): Unit = {
    var conn: Connection = null
    var stmt: PreparedStatement = null

    val upsertSql =
      """
        |INSERT INTO attendance_summary (total_present, total_absent, last_updated)
        |VALUES (?, ?, NOW())
        |ON DUPLICATE KEY UPDATE
        |total_present = total_present + VALUES(total_present),
        |total_absent = total_absent + VALUES(total_absent),
        |last_updated = NOW()
      """.stripMargin

    try {
      conn = createConnection(dbUrl, dbProps)
      stmt = conn.prepareStatement(upsertSql)
      stmt.setLong(1, totalPresent)
      stmt.setLong(2, totalAbsent)
      stmt.executeUpdate()
    } catch {
      case e: Exception =>
        // Driver-side error: log it but do not re-throw.
        println(s"更新总体出勤统计时出错: ${e.getMessage}")
        e.printStackTrace()
    } finally {
      if (stmt != null) stmt.close()
      if (conn != null) conn.close()
    }
  }

  /**
   * Opens a new JDBC connection to MySQL from the given URL and properties.
   *
   * Modern JDBC drivers self-register via the service-loader mechanism, so an
   * explicit Class.forName("com.mysql.cj.jdbc.Driver") is not needed here.
   */
  def createConnection(dbUrl: String, dbProps: Properties): Connection =
    DriverManager.getConnection(dbUrl, dbProps)
}