package com.shujia.flink.sink

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.connector.base.DeliveryGuarantee
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema
import org.apache.flink.connector.kafka.sink.KafkaSink


object Demo4KafkaSInk {

  /**
   * Entry point: streams every line of `data/students.txt` into the Kafka
   * topic `student` with at-least-once delivery semantics.
   *
   * The topic must already exist, e.g.:
   * kafka-topics.sh --create --zookeeper master:2181 --replication-factor 1 --partitions 3 --topic student
   */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Source: one String record per line of the local text file.
    val lines: DataStream[String] = env.readTextFile("data/students.txt")

    // Attach the Kafka sink and launch the job.
    lines.sinkTo(buildStudentSink())

    env.execute()
  }

  /**
   * Builds a [[KafkaSink]] that writes each String record as the message
   * value to topic `student` on broker `master:9092`.
   */
  private def buildStudentSink(): KafkaSink[String] = {
    // Per-record serializer: topic + value format (plain UTF-8 string).
    val recordSerializer: KafkaRecordSerializationSchema[String] =
      KafkaRecordSerializationSchema
        .builder[String]
        .setTopic("student")
        .setValueSerializationSchema(new SimpleStringSchema())
        .build

    KafkaSink
      .builder[String]
      // Kafka broker bootstrap list.
      .setBootstrapServers("master:9092")
      .setRecordSerializer(recordSerializer)
      // Delivery/processing guarantee for the sink.
      .setDeliverGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
      .build
  }

}
