package com.yangu.flink.cdc

import cn.hutool.core.convert.Convert
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.connector.kafka.source.KafkaSource
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaProducer, KafkaSerializationSchema}
import org.apache.kafka.clients.producer.ProducerConfig

import java.util.Properties

object KafkaUtil {

  /** Kafka brokers reject producer transactions whose timeout exceeds the
    * broker-side `transaction.max.timeout.ms` (15 minutes by default), while
    * Flink's FlinkKafkaProducer defaults to 1 hour — an EXACTLY_ONCE sink
    * would then fail at startup. Cap the timeout at the broker default.
    */
  private val TransactionTimeoutMs: String = (15 * 60 * 1000).toString

  /**
   * Builds a Kafka source that reads the given topic(s) as raw UTF-8 strings,
   * starting from the latest available offsets.
   *
   * @param topics topic name(s); converted to a `java.util.List[String]` via
   *               hutool's `Convert.toList`
   *               (NOTE(review): verify that a comma-separated multi-topic
   *               string is split as intended by `Convert.toList`)
   * @param prop   additional Kafka consumer properties (e.g. bootstrap
   *               servers, group id) applied to the builder
   * @return a configured `KafkaSource[String]`
   */
  def getKafkaSource(topics: String,
                     prop: Properties): KafkaSource[String] = {
    KafkaSource.builder[String]()
      .setTopics(Convert.toList(classOf[String], topics))
      .setValueOnlyDeserializer(new SimpleStringSchema)
      .setProperties(prop)
      .setStartingOffsets(OffsetsInitializer.latest())
      .build()
  }

  /**
   * Builds a simple string sink writing every record to a fixed topic with
   * the producer's default (at-least-once) delivery semantics.
   *
   * @param brokerList Kafka bootstrap servers, e.g. `"host1:9092,host2:9092"`
   * @param topic      destination topic name
   * @return a `FlinkKafkaProducer[String]` for the given topic
   */
  def getKafkaSink(brokerList: String, topic: String): FlinkKafkaProducer[String] =
    new FlinkKafkaProducer[String](brokerList, topic, new SimpleStringSchema)

  /**
   * Builds an exactly-once sink whose destination topic is taken from the
   * `ProducerRecord` produced by `serializationSchema`; the empty string
   * passed to the constructor is only the default/fallback topic.
   *
   * NOTE(review): this assumes `serializationSchema` always sets a topic on
   * each record — with the `""` default topic, a record without one would be
   * sent to an invalid topic. Confirm against the schema implementations.
   *
   * @param brokerList          Kafka bootstrap servers
   * @param serializationSchema maps each element to a `ProducerRecord`,
   *                            including its destination topic
   * @tparam T element type of the stream being written
   * @return a `FlinkKafkaProducer[T]` with EXACTLY_ONCE semantics
   */
  def getKafkaSink[T](brokerList: String,
                      serializationSchema: KafkaSerializationSchema[T]): FlinkKafkaProducer[T] = {
    val prop = new Properties
    prop.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
    // Required for EXACTLY_ONCE: keep the producer transaction timeout at or
    // below the broker's transaction.max.timeout.ms, otherwise the sink fails
    // on open with an InvalidTransactionTimeout-style error.
    prop.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, TransactionTimeoutMs)
    new FlinkKafkaProducer[T]("", serializationSchema, prop, FlinkKafkaProducer.Semantic.EXACTLY_ONCE)
  }
}
