package com.developer.sinktest

import com.developer.apitest.SensorReading
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer

object KafkaSinkTest {

  /** Flink job: reads sensor records from a text file and writes them to Kafka.
    *
    * Each input line is expected to be CSV: `id,timestamp,temperature`
    * (e.g. `sensor_1,1547718199,35.8`). Lines are parsed into [[SensorReading]]
    * and re-serialized via `toString` so they can go through `SimpleStringSchema`.
    *
    * The input file path may be supplied as the first program argument; when
    * no argument is given, the original hard-coded local path is used, so
    * existing invocations keep working unchanged.
    */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Parallelism 1 keeps output ordering deterministic for this demo.
    env.setParallelism(1)

    // Generalized: path comes from args(0) when provided, else the legacy default.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "D:\\idea\\projects\\other\\flink-demo\\src\\main\\resources\\sensor.txt"

    val streamFromFile = env.readTextFile(inputPath)

    // Parse each CSV line into a SensorReading, then convert back to String
    // so it can be serialized with SimpleStringSchema for the Kafka sink.
    val dataStream = streamFromFile.map { line =>
      val fields = line.split(",")
      SensorReading(fields(0).trim, fields(1).trim.toLong, fields(2).trim.toDouble).toString
    }

    // Sink to the Kafka topic "sinkTest" on a local broker; also print to stdout
    // so the records can be inspected while the job runs.
    dataStream.addSink(new FlinkKafkaProducer[String]("localhost:9092", "sinkTest", new SimpleStringSchema()))
    dataStream.print()

    // NOTE: when re-running against a fresh Kafka instance, remember to clear
    // the broker's /logs directory first (old log segments can interfere).

    env.execute("kafka sink test")
  }
}
