package com.study.flink.sink

import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer

/**
  * Demo: stream words from a local socket into a Kafka topic using
  * Flink's `FlinkKafkaProducer` sink.
  *
  * @author stephen
  * @date 2019-07-22 11:31
  */
object ScalaKafkaSinkDemo {

  def main(args: Array[String]): Unit = {
    // 1. Set up the streaming execution environment.
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // 2. Source: read text lines from a local socket (start one with `nc -lk 9999`).
    val lines: DataStream[String] = env.socketTextStream("localhost", 9999)

    // 3. Transformation: split each incoming line into individual words.
    // The import provides the implicit TypeInformation needed by flatMap.
    import org.apache.flink.api.scala._
    val words: DataStream[String] = lines.flatMap(_.split(" "))

    // 4. Sink: publish every word to the Kafka topic "test-topic".
    val kafkaProps = new Properties()
    kafkaProps.setProperty("bootstrap.servers", "localhost:9092")
    val kafkaSink = new FlinkKafkaProducer[String]("test-topic", new SimpleStringSchema(), kafkaProps)
    // Propagate Flink record timestamps into the produced Kafka messages.
    kafkaSink.setWriteTimestampToKafka(true)
    words.addSink(kafkaSink)

    // 5. Submit and run the job.
    env.execute("Kafka sink demo")
  }
}
