package com.example.edu.mock

import com.example.edu.entity.Answer
import com.google.gson.Gson
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord, RecordMetadata}
import org.slf4j.{Logger, LoggerFactory}

import java.util.Properties

/**
 * Mock data generator: a daemon-style thread that endlessly produces
 * simulated [[Answer]] records as JSON to the Kafka topic `edu`.
 *
 * @author leali
 * @since 2022/5/25
 */
class KafkaProducerThread extends Thread {
  val logger: Logger = LoggerFactory.getLogger(classOf[KafkaProducerThread])

  // Producer configuration. Keys must match Kafka's ProducerConfig constants.
  val props = new Properties()
  props.setProperty("bootstrap.servers", "node1:9092,node2:9092,node3:9092")
  // FIX: the correct config key is "acks" (plural). The original "ack" was
  // silently ignored by the producer (logged only as an unused property),
  // so the intended leader-only acknowledgment level never took effect.
  props.setProperty("acks", "1")
  props.setProperty("batch.size", "16384")        // up to 16 KB per partition batch
  props.setProperty("linger.ms", "5")             // wait up to 5 ms to fill a batch
  props.setProperty("buffer.memory", "33554432")  // 32 MB total send buffer
  props.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
  props.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")

  val producer: KafkaProducer[String, String] = new KafkaProducer[String, String](props)
  val gson = new Gson()

  /**
   * Produce loop: generate one simulated answer, serialize it with Gson,
   * send it asynchronously to topic `edu`, then sleep 300 ms.
   *
   * NOTE(review): the loop never terminates and `producer.close()` is never
   * called — acceptable for a mock generator whose process is killed
   * externally, but any flush-on-exit guarantee is absent.
   */
  override def run(): Unit = {
    while (true) {
      val answer: Answer = Simulator.genQuestion()
      val json: String = gson.toJson(answer)

      // Async send with a completion callback; exception == null means success.
      // Log strings are runtime output and are intentionally left unchanged.
      producer.send(new ProducerRecord[String, String]("edu", json),
        (metadata: RecordMetadata, exception: Exception) => {
          if (exception == null) {
            println("当前分区-偏移量：" + metadata.partition() + "-" + metadata.offset() + "\n数据发送成功：" + json)
            logger.info("当前分区-偏移量：" + metadata.partition() + "-" + metadata.offset() + "\n数据发送成功：" + json)
          } else {
            logger.error("数据发送失败：" + exception.getMessage)
          }
        })
      Thread.sleep(300) // throttle: ~3 records/second
    }
  }
}
