package com.shujia.flink.sink

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer

object Demo5KafkaSink {

  /**
   * Flink job: reads student records (one per line) from a local text file
   * and publishes each line to the Kafka topic "student".
   */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Source: each line of the students file becomes one String record.
    val lines: DataStream[String] = env.readTextFile("flink/data/students.txt")

    // Kafka sink. NOTE(review): if the topic does not already exist, the broker
    // auto-creates it with 1 partition / 1 replica (broker default settings).
    val kafkaSink = new FlinkKafkaProducer[String](
      "master:9092",          // broker list
      "student",              // target topic
      new SimpleStringSchema) // serializes each String record as UTF-8 bytes

    lines.addSink(kafkaSink)

    // Trigger execution of the lazily-built pipeline.
    env.execute()
  }

}
