package spark

import java.sql.{Connection, DriverManager, PreparedStatement}

import scala.util.control.NonFatal

import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}

object CategoryCount3 {
  // Author: Li Yiming

  /**
   * Streaming job: consumes tab-separated order records from the Kafka topic
   * "orders", counts occurrences per category within each 8-second micro-batch,
   * and persists the per-batch counts into the MySQL table `task3`.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("CategoryCount").setMaster("local[*]")
    val ssc = new StreamingContext(conf, Seconds(8))
    ssc.sparkContext.setLogLevel("WARN")

    val kafkaParams = Map(
      "bootstrap.servers" -> "192.168.136.128:9092",
      "key.deserializer" -> "org.apache.kafka.common.serialization.StringDeserializer",
      "value.deserializer" -> "org.apache.kafka.common.serialization.StringDeserializer",
      "group.id" -> "niit",
      "auto.offset.reset" -> "earliest"
    )

    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array("orders"), kafkaParams)
    )

    // Parse each message and key it by category.
    // NOTE(review): assumes the category is the first tab-separated field of
    // the record value — confirm against the producer's message format.
    val categoryPairs = stream.map { record =>
      val fields = record.value().split("\t")
      (fields(0), 1L)
    }

    // Aggregate counts per category within the current batch.
    val countStream = categoryPairs.reduceByKey(_ + _)
    countStream.print()

    // Persist each non-empty batch to MySQL, one connection per partition.
    countStream.foreachRDD { rdd =>
      if (!rdd.isEmpty()) {
        rdd.foreachPartition { partition =>
          var conn: Connection = null
          var stmt: PreparedStatement = null
          try {
            val url = "jdbc:mysql://localhost:3306/huel2?useSSL=false&serverTimezone=UTC"
            val user = "root"
            val password = "123456"
            conn = DriverManager.getConnection(url, user, password)
            conn.setAutoCommit(false)

            // Prepare the statement ONCE per partition and use JDBC batching.
            // (Previously a new PreparedStatement was created and closed for
            // every single record, defeating the purpose of the transaction.)
            val sql = "INSERT INTO task3 (category, count) VALUES (?,?)"
            stmt = conn.prepareStatement(sql)
            partition.foreach { case (category, count) =>
              stmt.setString(1, category)
              stmt.setLong(2, count)
              stmt.addBatch()
            }
            stmt.executeBatch()
            conn.commit()
          } catch {
            // NonFatal: let OutOfMemoryError / InterruptedException propagate.
            case NonFatal(e) =>
              e.printStackTrace()
              if (conn != null) {
                // Guard the rollback so a rollback failure cannot mask the
                // original exception.
                try conn.rollback()
                catch { case NonFatal(re) => re.printStackTrace() }
              }
          } finally {
            if (stmt != null) try stmt.close() catch { case NonFatal(e) => e.printStackTrace() }
            if (conn != null) try conn.close() catch { case NonFatal(e) => e.printStackTrace() }
          }
        }
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}