package sink

import java.util.Properties

import bean.SensorReading
import org.apache.flink.api.common.serialization.{SimpleStringEncoder, SimpleStringSchema}
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer011, FlinkKafkaProducer011}

/**
  * Flink Kafka sink demo: consumes sensor CSV records from a Kafka topic,
  * parses each line into a SensorReading, and writes the string form back
  * to another Kafka topic.
  *
  * Contact: QQ 1667847363
  *
  * @author xiao kun tai
  * @date 2021/11/13 23:32
  */
object KafkaSink {

  /**
    * Entry point. Reads sensor records from the Kafka topic "sensor"
    * (one CSV line per record: `id,timestamp,temperature`), parses each
    * line into a [[SensorReading]], prints it, and writes its string form
    * to the Kafka topic "sinktest".
    *
    * NOTE(review): broker address, topics, and group id are hard-coded;
    * consider lifting them into `args` or a config file for reuse.
    */
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Single-task pipeline so demo output stays in a deterministic order.
    env.setParallelism(1)

    // Kafka consumer configuration.
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", "192.168.88.106:9092")
    properties.setProperty("group.id", "consumer-group")
    properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    properties.setProperty("auto.offset.reset", "latest")

    // Source: raw CSV lines from the "sensor" topic.
    val kafkaStream: DataStream[String] = env
      .addSource(new FlinkKafkaConsumer011[String]("sensor", new SimpleStringSchema(), properties))

    // Parse each "id,timestamp,temperature" line into a SensorReading, then
    // re-serialize via toString so the sink still emits plain strings.
    // A malformed record fails the job (ArrayIndexOutOfBounds / NumberFormat);
    // acceptable for a demo, but production code should filter or side-output.
    val dataStream: DataStream[String] = kafkaStream.map(line => {
      val fields = line.split(",")
      SensorReading(fields(0), fields(1).toLong, fields(2).toDouble).toString
    })

    dataStream.print()

    // Sink: write the transformed records to the "sinktest" topic.
    dataStream
      .addSink(new FlinkKafkaProducer011[String]("192.168.88.106:9092",
        "sinktest", new SimpleStringSchema()))

    env.execute("kafka sink")
  }

}
