package Combine

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import java.sql.DriverManager

/**
 * Consumes four Kafka topics with Spark Streaming and writes aggregated
 * counts into MySQL tables.
 *
 * Topics / tables:
 *   - "stuInfo"     -> testClass (per-class gender counts) and
 *                      testSemclass (per-class, per-semester gender counts)
 *   - "semeterSex"  -> testsex
 *   - "stuSexcount" -> test
 *   - "stateStu"    -> teststate
 */
object CombinedStuProcessing {

  // JDBC connection settings shared by all MySQL sink functions.
  private val JdbcUrl =
    "jdbc:mysql://localhost:3306/HUEL?characterEncoding=UTF-8&useSSL=false"
  private val JdbcUser = "root"
  private val JdbcPassword = "123456"

  def main(args: Array[String]): Unit = {
    // Spark configuration: local mode, 2-second micro-batches.
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("CombinedSparkRealTimeProcessingToMySQL")
    val ssc = new StreamingContext(conf, Seconds(2))
    ssc.sparkContext.setLogLevel("ERROR")

    // Kafka consumer configuration.
    // NOTE(review): auto-commit is disabled but offsets are never committed
    // anywhere in this file, so a restart resumes from the group's last
    // committed (or auto.offset.reset) position — confirm this is intended.
    val kakaParams = Map[String, Object](
      "bootstrap.servers" -> "192.168.136.128:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "niit",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    // Wire up each topic's processing pipeline.
    processStuInfoData(ssc, kakaParams)
    processSemeterSexData(ssc, kakaParams)
    processStuSexcountData(ssc, kakaParams)
    processStateStuData(ssc, kakaParams)

    // Start streaming and block until termination.
    ssc.start()
    ssc.awaitTermination()
  }

  /** Creates a direct Kafka stream subscribed to a single topic. */
  private def createStream(
      ssc: StreamingContext,
      kakaParams: Map[String, Object],
      topic: String) =
    KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      Subscribe[String, String](Array(topic), kakaParams)
    )

  /**
   * Consumes the tab-separated "stuInfo" topic and feeds two aggregations:
   * per-(class, gender) counts and per-(class, semester, gender) counts.
   * Field layout (inferred from the indices used): 0 = class id,
   * 2 = gender flag (0 = female, otherwise male), 5 = semester —
   * TODO confirm against the producer's record schema.
   */
  def processStuInfoData(ssc: StreamingContext, kakaParams: Map[String, Object]): Unit = {
    // NOTE(review): this stream feeds two independent output operations, so
    // each batch is read from Kafka twice unless the DStream is cached.
    val streamRdd = createStream(ssc, kakaParams, "stuInfo")

    // (classId, gender) — per-class gender aggregation input.
    val rowRddClass = streamRdd.map(_.value()).map { line =>
      val fields = line.split("\t")
      (fields(0).toInt, if (fields(2).toInt == 0) "female" else "male")
    }

    // (classId, semester, gender) — per-class/semester aggregation input.
    val rowRddSemClass = streamRdd.map(_.value()).map { line =>
      val fields = line.split("\t")
      (fields(0).toInt, fields(5).toInt, if (fields(2).toInt == 0) "female" else "male")
    }

    // Count students per (class, gender) and persist to MySQL.
    rowRddClass.foreachRDD { rdd =>
      val genderCount = rdd
        .map(person => ((person._1, person._2), 1))
        .reduceByKey(_ + _)
      pushToMySQL_stuClass(genderCount, "testClass")
    }

    // Count students per (class, semester, gender) and persist to MySQL.
    rowRddSemClass.foreachRDD { rdd =>
      val genderCount = rdd
        .map(person => ((person._1, person._2, person._3), 1))
        .reduceByKey(_ + _)
      pushToMySQL_stuSemClass(genderCount, "testSemclass")
    }
  }

  /**
   * Consumes the comma-separated "semeterSex" topic — fields
   * (semester, gender, count) — sums counts per (semester, gender)
   * and persists them.
   */
  def processSemeterSexData(ssc: StreamingContext, kakaParams: Map[String, Object]): Unit = {
    val streamRdd = createStream(ssc, kakaParams, "semeterSex")

    // (semester, gender, partialCount)
    val rowRdd = streamRdd.map(_.value()).map { line =>
      val fields = line.split(",")
      (fields(0).toInt, fields(1), fields(2).toInt)
    }

    rowRdd.foreachRDD { rdd =>
      val genderCount = rdd
        .map(person => ((person._1, person._2), person._3))
        .reduceByKey(_ + _)
      pushToMySQL_semeterSex(genderCount, "testsex")
    }
  }

  /**
   * Consumes the colon-separated "stuSexcount" topic — fields
   * (gender, count) — sums counts per gender and persists them.
   */
  def processStuSexcountData(ssc: StreamingContext, kakaParams: Map[String, Object]): Unit = {
    val streamRdd = createStream(ssc, kakaParams, "stuSexcount")

    // (gender, partialCount)
    val rowRdd = streamRdd.map(_.value()).map { line =>
      val fields = line.split(":")
      (fields(0), fields(1).toInt)
    }

    rowRdd.foreachRDD { rdd =>
      val genderCount = rdd
        .map(person => (person._1, person._2))
        .reduceByKey(_ + _)
      pushToMySQL_stuSexcount(genderCount, "test")
    }
  }

  /**
   * Consumes the comma-separated "stateStu" topic — fields
   * (state, gender, count) — sums counts per (state, gender)
   * and persists them.
   */
  def processStateStuData(ssc: StreamingContext, kakaParams: Map[String, Object]): Unit = {
    val streamRdd = createStream(ssc, kakaParams, "stateStu")

    // (state, gender, partialCount)
    val rowRdd = streamRdd.map(_.value()).map { line =>
      val fields = line.split(",")
      (fields(0), fields(1), fields(2).toInt)
    }

    rowRdd.foreachRDD { rdd =>
      val genderCount = rdd
        .map(person => ((person._1, person._2), person._3))
        .reduceByKey(_ + _)
      pushToMySQL_stateStu(genderCount, "teststate")
    }
  }

  /**
   * Inserts one partition's rows into `tableName` over a single JDBC
   * connection, using a parameterized INSERT.
   *
   * Fixes two defects of the original per-topic writers:
   *   - data values were interpolated into the SQL string (SQL-injection
   *     prone and broken by quotes in the data); now bound via
   *     PreparedStatement parameters;
   *   - connection/statement leaked when executeUpdate threw; now closed
   *     in finally blocks.
   *
   * Empty partitions open no connection. Table and column names are
   * supplied only by this file's own code, never by the data.
   *
   * @param rows     one Seq of column values per row, in `columns` order
   * @param tableName destination MySQL table
   * @param columns  destination column names
   */
  private def insertPartition(
      rows: Iterator[Seq[Any]],
      tableName: String,
      columns: Seq[String]): Unit =
    if (rows.hasNext) {
      val sql = s"INSERT INTO $tableName (${columns.mkString(", ")}) " +
        s"VALUES (${columns.map(_ => "?").mkString(", ")})"
      val connection = DriverManager.getConnection(JdbcUrl, JdbcUser, JdbcPassword)
      try {
        val statement = connection.prepareStatement(sql)
        try {
          rows.foreach { values =>
            values.zipWithIndex.foreach { case (v, i) =>
              // JDBC indices are 1-based; box Scala primitives for setObject.
              statement.setObject(i + 1, v.asInstanceOf[AnyRef])
            }
            statement.executeUpdate()
          }
        } finally statement.close()
      } finally connection.close()
    }

  /**
   * Persists (classId, gender) -> count pairs into `tableName`
   * (columns: class, gender, count). K is erased at runtime; the keys
   * produced by processStuInfoData are (Int, String) tuples.
   */
  def pushToMySQL_stuClass[K](genderCount: org.apache.spark.rdd.RDD[(K, Int)], tableName: String): Unit =
    genderCount.foreachPartition { partition =>
      val rows = partition.map { case (key, count) =>
        val (classId, gender) = key.asInstanceOf[(Int, String)]
        Seq[Any](classId, gender, count)
      }
      insertPartition(rows, tableName, Seq("class", "gender", "count"))
    }

  /**
   * Persists (classId, semester, gender) -> count triples into `tableName`
   * (columns: class, semester, gender, count). Keys produced by
   * processStuInfoData are (Int, Int, String) tuples.
   */
  def pushToMySQL_stuSemClass[K](genderCount: org.apache.spark.rdd.RDD[(K, Int)], tableName: String): Unit =
    genderCount.foreachPartition { partition =>
      val rows = partition.map { case (key, count) =>
        val (classId, semester, gender) = key.asInstanceOf[(Int, Int, String)]
        Seq[Any](classId, semester, gender, count)
      }
      insertPartition(rows, tableName, Seq("class", "semester", "gender", "count"))
    }

  /**
   * Persists (semester, gender) -> count pairs into `tableName`
   * (columns: semester, gender, count). Keys produced by
   * processSemeterSexData are (Int, String) tuples.
   */
  def pushToMySQL_semeterSex[K](genderCount: org.apache.spark.rdd.RDD[(K, Int)], tableName: String): Unit =
    genderCount.foreachPartition { partition =>
      val rows = partition.map { case (key, count) =>
        val (semester, gender) = key.asInstanceOf[(Int, String)]
        Seq[Any](semester, gender, count)
      }
      insertPartition(rows, tableName, Seq("semester", "gender", "count"))
    }

  /**
   * Persists gender -> count pairs into `tableName`
   * (columns: gender, count). Keys produced by processStuSexcountData
   * are plain Strings.
   */
  def pushToMySQL_stuSexcount[K](genderCount: org.apache.spark.rdd.RDD[(K, Int)], tableName: String): Unit =
    genderCount.foreachPartition { partition =>
      val rows = partition.map { case (key, count) =>
        val gender = key.asInstanceOf[String]
        Seq[Any](gender, count)
      }
      insertPartition(rows, tableName, Seq("gender", "count"))
    }

  /**
   * Persists (state, gender) -> count pairs into `tableName`
   * (columns: state, gender, count). Keys produced by processStateStuData
   * are (String, String) tuples.
   */
  def pushToMySQL_stateStu[K](genderCount: org.apache.spark.rdd.RDD[(K, Int)], tableName: String): Unit =
    genderCount.foreachPartition { partition =>
      val rows = partition.map { case (key, count) =>
        val (state, gender) = key.asInstanceOf[(String, String)]
        Seq[Any](state, gender, count)
      }
      insertPartition(rows, tableName, Seq("state", "gender", "count"))
    }
}