package com.yanggu.bigdata.realtime.utils

import cn.hutool.core.convert.Convert
import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.connector.kafka.source.KafkaSource
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaProducer, KafkaSerializationSchema}
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerConfig

import java.util.Properties

object KafkaUtil {

  /**
   * Builds a Kafka [[KafkaSource]] from an explicit [[Properties]] object.
   *
   * @param topics comma-separated topic names (split via hutool's `Convert.toList`)
   * @param prop   consumer properties (bootstrap servers, group id, etc.)
   * @return a string-valued KafkaSource starting from the latest offsets
   */
  def getKafkaSource(topics: String,
                     prop: Properties): KafkaSource[String] = {
    KafkaSource.builder[String]()
      .setTopics(Convert.toList(classOf[String], topics))
      .setValueOnlyDeserializer(new SimpleStringSchema)
      .setProperties(prop)
      // NOTE(review): latest() ignores committed group offsets, so after a
      // restart the job will NOT resume where the group left off even though
      // offsets are committed on checkpoint below. If resume-from-committed is
      // intended, use OffsetsInitializer.committedOffsets(...) — confirm with callers.
      .setStartingOffsets(OffsetsInitializer.latest())
      .build()
  }

  /**
   * Builds a Kafka [[KafkaSource]] from broker list, topics and group id.
   *
   * @param brokerList Kafka bootstrap servers (host:port, comma-separated)
   * @param topics     comma-separated topic names
   * @param groupId    consumer group id
   * @return a string-valued KafkaSource starting from the latest offsets
   */
  def getKafkaSource(brokerList: String,
                     topics: String,
                     groupId: String): KafkaSource[String] = {
    val kafkaConsumerProp = new Properties
    kafkaConsumerProp.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
    kafkaConsumerProp.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId)
    kafkaConsumerProp.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true")
    // Flink KafkaSource option: commit offsets back to Kafka on each checkpoint.
    kafkaConsumerProp.setProperty("commit.offsets.on.checkpoint", "true")
    getKafkaSource(topics, kafkaConsumerProp)
  }

  /**
   * Creates a DataStream reading from the given Kafka topics, with no watermarks.
   *
   * @param env        the Flink streaming environment
   * @param brokerList Kafka bootstrap servers
   * @param topics     comma-separated topic names
   * @param groupId    consumer group id
   * @return a DataStream of raw message values as strings
   */
  def getKafkaDataStream(env: StreamExecutionEnvironment,
                         brokerList: String,
                         topics: String,
                         groupId: String) = {
    env.fromSource(getKafkaSource(brokerList, topics, groupId),
      WatermarkStrategy.noWatermarks[String](), s"$topics source")
  }

  /**
   * Creates a simple at-least-once string sink for a fixed topic.
   *
   * @param brokerList Kafka bootstrap servers
   * @param topic      target topic
   * @return a FlinkKafkaProducer writing plain strings
   */
  def getKafkaSink(brokerList: String, topic: String) = new FlinkKafkaProducer[String](brokerList, topic, new SimpleStringSchema)

  /**
   * Creates an exactly-once Kafka sink with a custom serialization schema
   * (the schema decides the target topic per record).
   *
   * @param brokerList          Kafka bootstrap servers
   * @param serializationSchema custom schema; can route records to dynamic topics
   * @tparam T record type
   * @return a transactional FlinkKafkaProducer with EXACTLY_ONCE semantics
   */
  def getKafkaSink[T](brokerList: String,
                      serializationSchema: KafkaSerializationSchema[T]) = {
    val prop = new Properties
    prop.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
    // EXACTLY_ONCE requires transaction.timeout.ms <= the broker's
    // transaction.max.timeout.ms (default 15 min). Flink's own default is
    // 1 hour, which makes the producer fail against a default broker, so we
    // pin it to 15 minutes explicitly.
    prop.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, "900000")
    // The default-topic argument is unused here: the KafkaSerializationSchema
    // sets the topic on every ProducerRecord it builds.
    new FlinkKafkaProducer[T]("", serializationSchema, prop, FlinkKafkaProducer.Semantic.EXACTLY_ONCE)
  }

  /**
   * Renders the Kafka connector WITH-clause for a Flink SQL DDL statement.
   *
   * @param brokerList Kafka bootstrap servers
   * @param topic      source topic
   * @param groupId    consumer group id
   * @return the WITH (...) clause as a string, JSON format, group-offsets startup
   */
  def getKafkaDDL(brokerList: String,
                  topic: String,
                  groupId: String): String = {
    s"""
      |WITH (
      |  'connector' = 'kafka',
      |  'topic' = '$topic',
      |  'properties.bootstrap.servers' = '$brokerList',
      |  'properties.group.id' = '$groupId',
      |  'scan.startup.mode' = 'group-offsets',
      |  'format' = 'json'
      |)
      |""".stripMargin
  }

}
