package com.yanggu.flink.datastream_api.sink

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.connector.kafka.sink.{KafkaRecordSerializationSchema, KafkaSink}
import org.apache.flink.streaming.api.scala._

/**
 * 把flink的DataStream数据输出到kafka中
 */
/**
 * Demo: write a Flink DataStream out to a Kafka topic.
 *
 * Builds a [[KafkaSink]] with a plain-string value serializer, feeds it a
 * small in-memory collection (each element doubled and stringified), and
 * runs the job synchronously.
 */
object KafkaSinkDemo {

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Serialize each record's value as a UTF-8 string into the target topic.
    val recordSerializer = KafkaRecordSerializationSchema.builder()
      .setTopic("test-kafka-sink")
      .setValueSerializationSchema(new SimpleStringSchema())
      .build()

    // Sink targeting a local single-broker Kafka instance.
    val sink = KafkaSink.builder[String]()
      .setBootstrapServers("localhost:9092")
      .setRecordSerializer(recordSerializer)
      .build()

    // Alternative: obtain the sink via the shared helper instead.
    //val sink = KafkaUtil.getKafkaSink("localhost:9092", "test-kafka-sink")

    val source = env.fromCollection(Seq(1, 2, 3, 4))

    // Double every element, then render it as a string for the Kafka sink.
    source
      .map(2 * _)
      .map(_.toString)
      .sinkTo(sink)

    // Blocks until the (bounded) job finishes.
    env.execute("KafkaSinkDemo Job")
  }

}
