package com.king.util

import java.lang
import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer, FlinkKafkaProducer, KafkaSerializationSchema}
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.{ProducerConfig, ProducerRecord}

/**
 * @Author: KingWang
 * @Date: 2022/1/1
 * @Desc: Factory helpers for building Flink Kafka producers and consumers
 *        preconfigured for the hadoop200-202 cluster.
 **/
object MyKafkaUtil {

  /** Comma-separated Kafka bootstrap servers for the cluster. */
  val broker_list = "hadoop200:9092,hadoop201:9092,hadoop202:9092"

  /** Fallback topic passed to the schema-based producer; only used when the
    * [[KafkaSerializationSchema]] does not set a topic on the record.
    * NOTE(review): "DFFAULT" looks like a typo for "DEFAULT" — confirm whether
    * a topic already exists under this exact name before renaming the value. */
  val default_topic = "DWD_DFFAULT_TOPIC"

  /**
   * Creates a simple string producer bound to a fixed topic.
   * Uses the basic constructor, i.e. Flink's default (at-least-once) semantic.
   *
   * @param topic target Kafka topic
   * @return a [[FlinkKafkaProducer]] serializing elements with [[SimpleStringSchema]]
   */
  def getKafkaProducer(topic:String):FlinkKafkaProducer[String] =
    new FlinkKafkaProducer[String](broker_list,topic,new SimpleStringSchema())

  /**
   * Creates a transactional, exactly-once producer driven by a custom
   * serialization schema. The schema routes each record to its own topic;
   * [[default_topic]] is the fallback.
   *
   * @param kafkaSerializationSchema per-record serializer and topic router
   * @tparam T element type
   * @return an exactly-once [[FlinkKafkaProducer]]
   */
  def getKafkaProducer[T](kafkaSerializationSchema:KafkaSerializationSchema[T]):FlinkKafkaProducer[T] = {
    val prop = new Properties()
    prop.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, broker_list)
    // Must stay below the broker's transaction.max.timeout.ms (15 min by default);
    // Flink's own default of 1 hour would be rejected by the broker.
    prop.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, (1000 * 60 * 3).toString)
    prop.setProperty(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1")
    prop.setProperty(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true")

    new FlinkKafkaProducer[T](
      default_topic,
      kafkaSerializationSchema,
      prop,
      // Exactly-once only takes effect once checkpointing is enabled on the job.
      FlinkKafkaProducer.Semantic.EXACTLY_ONCE)
  }

  /**
   * Creates a string consumer for the given topic and consumer group.
   *
   * @param topic   Kafka topic to subscribe to
   * @param groupId Kafka consumer-group id
   * @return a [[FlinkKafkaConsumer]] deserializing records with [[SimpleStringSchema]]
   */
  def getKafkaConsumer(topic:String,groupId:String):FlinkKafkaConsumer[String] = {
    val prop = new Properties()
    // setProperty (not put): keeps values in the String-only Properties table,
    // consistent with the producer configuration above.
    prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, broker_list)
    prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId)
    new FlinkKafkaConsumer[String](topic,new SimpleStringSchema(), prop)
  }

}
