package com.shujia.flink.sink

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.connector.base.DeliveryGuarantee
import org.apache.flink.streaming.api.scala._
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema
import org.apache.flink.connector.kafka.sink.KafkaSink

object Demo6KafkaSInk {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Source stream: one student JSON record per line of the input file.
    val studentDS: DataStream[String] = env.readTextFile("data/students.json")

    /**
     * Publish the stream to Kafka through the Kafka sink connector.
     *
     * Delivery guarantees:
     *   DeliveryGuarantee.EXACTLY_ONCE  — exactly-once delivery (requires transactions)
     *   DeliveryGuarantee.AT_LEAST_ONCE — at-least-once delivery (the default)
     */

    // Record serializer: each String element becomes the value of a record
    // on topic "students_json" (no key is set).
    val recordSerializer: KafkaRecordSerializationSchema[String] =
      KafkaRecordSerializationSchema
        .builder[String]()
        .setTopic("students_json") // target topic
        .setValueSerializationSchema(new SimpleStringSchema())
        .build()

    // Build the sink against the Kafka cluster.
    val sink: KafkaSink[String] = KafkaSink
      .builder[String]()
      .setBootstrapServers("master:9092,node1:9092,node2:9092") // broker addresses
      .setRecordSerializer(recordSerializer)
      //.setDeliverGuarantee(DeliveryGuarantee.AT_LEAST_ONCE) // at-least-once (the default)
      .build()

    // Attach the Kafka sink to the stream and run the job.
    studentDS.sinkTo(sink)

    env.execute()
  }

}
