package com.shujia.sink

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.connector.kafka.sink.{KafkaRecordSerializationSchema, KafkaSink}
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}

object Demo3kafkaSInk {

  /**
    * Demo: writing a DataStream to Kafka with the new unified `KafkaSink`.
    *
    * Reads `data/students.txt` line by line and sends each line, serialized
    * as a plain UTF-8 string, to the `student` topic on broker `master:9092`.
    */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Source: each element of the stream is one line of the file.
    val studentDS: DataStream[String] = env.readTextFile("data/students.txt")

    /**
      * Kafka sink: writes the stream's elements into Kafka.
      */
    val kafkaSink: KafkaSink[String] = KafkaSink.builder()
      // Kafka broker address list (comma-separated host:port pairs)
      .setBootstrapServers("master:9092")
      .setRecordSerializer(
        KafkaRecordSerializationSchema.builder()
          // target topic to write to
          .setTopic("student")
          // serializer for the record value: raw UTF-8 string bytes
          .setValueSerializationSchema(new SimpleStringSchema())
          .build()
      )
      .build()

    /**
      * addSink: legacy sink API (pre-unified connectors)
      * sinkTo : new API introduced with the unified Source/Sink interfaces
      */
    studentDS.sinkTo(kafkaSink)

    // Flink is lazy: the job graph only runs once execute() is called.
    env.execute()

  }

}
