package com.yanggu.flink.datastream_api.util

import cn.hutool.core.convert.Convert
import cn.hutool.json.JSONUtil
import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.common.serialization.{SerializationSchema, SimpleStringSchema}
import org.apache.flink.connector.kafka.sink.{KafkaRecordSerializationSchema, KafkaSink, TopicSelector}
import org.apache.flink.connector.kafka.source.KafkaSource
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer
import org.apache.flink.streaming.api.scala._
import org.apache.kafka.clients.consumer.OffsetResetStrategy

import java.nio.charset.StandardCharsets
import java.util.Properties

object KafkaUtil {

  /**
   * Builds a Kafka source that reads raw String values from the given topics.
   *
   * @param brokers comma-separated bootstrap server list (host:port pairs)
   * @param topics  comma-separated topic names; hutool's `Convert.toList`
   *                splits the string into a `java.util.List[String]`
   * @param groupId Kafka consumer group id
   * @param prop    additional consumer properties (optional)
   * @return a [[KafkaSource]] of String that resumes from committed offsets,
   *         falling back to EARLIEST when the group has no committed offset
   */
  def getKafkaSource(brokers: String,
                     topics: String,
                     groupId: String,
                     prop: Properties = new Properties()): KafkaSource[String] = {
    KafkaSource.builder[String]()
      .setBootstrapServers(brokers)
      // Convert.toList splits the comma-separated topic string into a list
      .setTopics(Convert.toList(classOf[String], topics))
      .setGroupId(groupId)
      .setValueOnlyDeserializer(new SimpleStringSchema())
      .setProperties(prop)
      // Resume from committed offsets; use EARLIEST when none exist yet
      .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.EARLIEST))
      .build()
  }

  /**
   * Reads from Kafka and exposes the records as a `DataStream[String]`.
   *
   * @param env     the Flink streaming execution environment
   * @param brokers comma-separated bootstrap server list
   * @param topics  comma-separated topic names
   * @param groupId Kafka consumer group id
   * @param prop    additional consumer properties (optional)
   * @return a DataStream of the raw String records
   */
  def getKafkaDataStream(env: StreamExecutionEnvironment,
                         brokers: String,
                         topics: String,
                         groupId: String,
                         prop: Properties = new Properties()): DataStream[String] = {
    // No watermarks here: downstream operators must assign their own
    // watermark strategy if event-time semantics are required.
    env.fromSource(getKafkaSource(brokers, topics, groupId, prop), WatermarkStrategy.noWatermarks(), s"Kafka $brokers Source")
  }

  /**
   * Builds a Kafka sink that writes String records to a single fixed topic.
   *
   * @param bootstrapServers comma-separated broker addresses
   * @param topic            the target topic
   * @param kafkaProducerConfig additional producer properties (optional)
   * @return a [[KafkaSink]] of String for the given topic
   */
  def getKafkaSink(bootstrapServers: String,
                   topic: String,
                   kafkaProducerConfig: Properties = new Properties()): KafkaSink[String] = {
    KafkaSink
      .builder[String]()
      .setBootstrapServers(bootstrapServers)
      .setRecordSerializer(
        KafkaRecordSerializationSchema
          .builder()
          .setTopic(topic)
          .setValueSerializationSchema(new SimpleStringSchema())
          .build()
      )
      .setKafkaProducerConfig(kafkaProducerConfig)
      .build()
  }

  /**
   * Builds a Kafka sink whose target topic is chosen per record.
   * Each element is serialized to a UTF-8 JSON byte array via hutool's JSONUtil.
   *
   * @param bootstrapServers comma-separated broker addresses
   * @param topicSelector    derives the target topic from each element
   * @param kafkaProducerConfig additional producer properties (optional)
   * @tparam T the element type of the stream being written
   * @return a [[KafkaSink]] of T that routes records via the topic selector
   */
  def getKafkaSinkForDynamicTopic[T](bootstrapServers: String,
                                     topicSelector: TopicSelector[T],
                                     kafkaProducerConfig: Properties = new Properties()): KafkaSink[T] = {
    KafkaSink.builder[T]()
      .setBootstrapServers(bootstrapServers)
      .setKafkaProducerConfig(kafkaProducerConfig)
      .setRecordSerializer(
        KafkaRecordSerializationSchema
          .builder()
          .setTopicSelector(topicSelector)
          .setValueSerializationSchema(new SerializationSchema[T]() {
            // Serialize every element as its JSON representation in UTF-8
            override def serialize(element: T): Array[Byte] =
              JSONUtil.toJsonStr(element).getBytes(StandardCharsets.UTF_8)
          })
          .build()
      )
      .build()
  }

}
