package com.boot.study.sink

import java.util.Properties

import com.boot.study.api.SensorReading
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, _}
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer011, FlinkKafkaProducer011}


/**
 * Demo Flink job: consume CSV sensor records from the Kafka topic "sensor",
 * parse each record into a [[SensorReading]], print it, and forward the
 * rendered string to the Kafka topic "sinkTest".
 */
object KafkaSinkTest {
  def main(args: Array[String]): Unit = {
    // Set up the streaming execution environment; parallelism 1 keeps
    // console output ordered for this demo.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    //    // Alternative source: read records from a local file instead of Kafka
    //    val fileStream: DataStream[String] = env.readTextFile("D:\\WorkSpace\\idea\\Flink\\src\\main\\resources\\sensor.txt")

    //   Producer command for feeding test data into the source topic:
    //   kafka-console-producer.bat --broker-list 127.0.0.1:9092 --topic sensor
    //   Read the input records from Kafka.
    val consumerProps = new Properties()
    consumerProps.setProperty("bootstrap.servers", "127.0.0.1:9092")
    consumerProps.setProperty("group.id", "consumer-group")
    val kafkaStream: DataStream[String] =
      env.addSource(new FlinkKafkaConsumer011[String]("sensor", new SimpleStringSchema(), consumerProps))

    // Parse each comma-separated record (id, timestamp, temperature) into a
    // SensorReading, then render it back to its string form for the sink.
    val readings: DataStream[String] = kafkaStream.map { line =>
      val fields = line.split(",")
      SensorReading(fields(0), fields(1).toLong, fields(2).toDouble).toString
    }

    readings.print("kafka sink")
    // Write the transformed records to the "sinkTest" topic.
    readings.addSink(new FlinkKafkaProducer011[String]("127.0.0.1:9092", "sinkTest", new SimpleStringSchema()))

    // Consumer command for inspecting the sink output:
    // kafka-console-consumer.bat --bootstrap-server 127.0.0.1:9092 --topic sinkTest

    // Launch the job
    env.execute("kafka sink test")
  }
}
