package sink



import bean.SensorReading
import org.apache.flink.api.common.serialization.SimpleStringEncoder
import org.apache.flink.api.scala._
import org.apache.flink.core.fs.Path
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}

/**
  * Word-count style file-sink demo job.
  *
  * Reads sensor records (CSV: `id,timestamp,temperature`) from
  * `src/main/resources/sensor.txt`, parses each line into a
  * [[bean.SensorReading]], prints the stream, and writes it to
  * `src/main/resources/out` using a row-format [[StreamingFileSink]]
  * (the recommended file sink over the deprecated `writeAsCsv`).
  *
  * @author xiao kun tai
  * @since 2021/11/13
  */
object FileSink {

  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Single parallel task so the demo produces one readable output file per bucket.
    env.setParallelism(1)

    val inputPath: String = "src/main/resources/sensor.txt"
    val inputStream: DataStream[String] = env.readTextFile(inputPath)

    // Parse each CSV line into the domain type.
    // Robustness fix: the original used map + unchecked arr(1).toLong / arr(2).toDouble,
    // so a blank or malformed line (e.g. a trailing empty line in the file) would kill
    // the whole job with ArrayIndexOutOfBounds/NumberFormatException. flatMap lets us
    // drop such lines instead; fields are trimmed to tolerate padded CSV.
    val dataStream: DataStream[SensorReading] = inputStream.flatMap { line =>
      val fields = line.split(",").map(_.trim)
      if (fields.length >= 3) {
        try {
          List(SensorReading(fields(0), fields(1).toLong, fields(2).toDouble))
        } catch {
          // Non-numeric timestamp/temperature: skip the record rather than fail the job.
          case _: NumberFormatException => Nil
        }
      } else {
        Nil // blank or incomplete line — skip
      }
    }

    dataStream.print()

    //    dataStream.writeAsCsv("src/main/resources/out.txt")

    // Recommended sink: row-format StreamingFileSink writing one record per line.
    dataStream.addSink(
      StreamingFileSink.forRowFormat(
        new Path("src/main/resources/out"),
        new SimpleStringEncoder[SensorReading]()
      ).build()
    )

    // Lazily-built pipeline only runs once execute() is called.
    env.execute("file sink")
  }

}
