package com.shujia.kafka

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer

import java.util.Properties

/**
 * Demo: using Kafka as a Flink sink.
 *
 * Reads the student text file line by line and publishes each line
 * as a String record to the Kafka topic "stus".
 *
 * Topic can be created with:
 *   kafka-topics.sh --zookeeper master:2181 --topic stus --create --partitions 1 --replication-factor 1
 */
object Demo03KafkaSink {

  def main(args: Array[String]): Unit = {
    // Flink streaming execution environment (entry point of the job graph).
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Bounded source: one record per line of the students file.
    val linesDS: DataStream[String] = env.readTextFile("Flink/data/stu/students.txt")

    // Kafka producer configuration: only the broker list is required here.
    val kafkaConfig = new Properties()
    kafkaConfig.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")

    // Sink that serializes each String record with SimpleStringSchema
    // and writes it to the "stus" topic.
    val sink: FlinkKafkaProducer[String] = new FlinkKafkaProducer[String](
      "stus",                   // target topic
      new SimpleStringSchema(), // record serialization schema
      kafkaConfig               // producer properties (broker addresses)
    )

    linesDS.addSink(sink)

    // Trigger the job; nothing above runs until execute() is called.
    env.execute()
  }

}
