package com.kingjw.sourceTest

import org.apache.flink.api.common.serialization.{SimpleStringEncoder, SimpleStringSchema}
import org.apache.flink.core.fs.Path
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011

/**
 *
 * @Package: com.kingjw.sourceTest
 * @ClassName: SinkTest
 * @Author: 王坚伟
 * @CreateTime: 2022/1/17 17:20
 * @Description:
 */
object SinkTest {

  /**
   * Reads sensor CSV lines from `input/sensor.txt`, round-trips them through
   * the [[SensorReading]] case class, and writes the string form to Kafka.
   *
   * @param args optional overrides, kept backward-compatible with the
   *             previous hard-coded values:
   *             args(0) = Kafka bootstrap servers (default "hadoop117:9092"),
   *             args(1) = target topic            (default "sensor").
   */
  def main(args: Array[String]): Unit = {
    val brokerList = if (args.length > 0) args(0) else "hadoop117:9092"
    val topic      = if (args.length > 1) args(1) else "sensor"

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val inputStream: DataStream[String] = env.readTextFile("input/sensor.txt")

    // Parse each CSV line ("id,timestamp,temperature") into a SensorReading,
    // then back to its string form so the sink only handles plain strings.
    val dataStream: DataStream[String] = inputStream
      .map { line =>
        val fields = line.split(",")
        SensorReading(fields(0), fields(1).trim.toLong, fields(2).trim.toDouble).toString
      }

    dataStream.print()

    // Alternative row-format file sink. NOTE(review): dataStream is a
    // DataStream[String], so the encoder must be SimpleStringEncoder[String]
    // (the original commented code used [SensorReading], which would not
    // compile if re-enabled).
//    dataStream.addSink(
//      StreamingFileSink.forRowFormat(
//        new Path("output"),
//        new SimpleStringEncoder[String]()
//      ).build()
//    )

    // Active sink: publish every record to the Kafka topic.
    dataStream.addSink(new FlinkKafkaProducer011[String](brokerList, topic, new SimpleStringSchema()))

    // Job name fixed: the running sink is Kafka, not a file sink.
    env.execute("kafka sink test")
  }
}
//case class SensorReading(id: String, timestamp: Long, temperature: Double)