package com.shujia.flink.sink

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer

/**
  * Flink streaming job: reads `data/students.txt` line by line and writes each
  * line as a plain UTF-8 string to the Kafka topic "student" on broker
  * master:9092.
  */
object Demo2SinkKafka {

  def main(args: Array[String]): Unit = {
    // Entry point into the Flink streaming API.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Source: one stream element per line of the input file.
    val lines: DataStream[String] = env.readTextFile("data/students.txt")

    // Kafka producer sink; SimpleStringSchema serializes each record as-is.
    val kafkaSink = new FlinkKafkaProducer[String](
      "master:9092",          // broker list
      "student",              // target topic
      new SimpleStringSchema  // serialization schema
    )

    // Attach the sink so every line is published to Kafka.
    lines.addSink(kafkaSink)

    // Flink pipelines are lazy — execute() submits and runs the job.
    env.execute()
  }
}
