package cn.dfun.sample.flink.apitest
import org.apache.flink.api.common.serialization.SimpleStringEncoder
import org.apache.flink.core.fs.Path
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment

/**
  * File sink demo: reads sensor CSV lines from a text file, parses each line
  * into a [[SensorReading]], and writes the stream back out as row-encoded
  * text files via [[StreamingFileSink]].
  */
object FileSinkTest {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Parallelism 1 keeps output in a single file/part for easy inspection.
    env.setParallelism(1)
    val inputPath = "C:\\wor\\flink-sample\\src\\main\\resources\\sensor"
    val inputStream = env.readTextFile(inputPath)

    // Wrap each CSV line ("id,timestamp,temperature") into a case class.
    // NOTE(review): assumes every line has exactly 3 well-formed fields;
    // a short or malformed line throws and fails the task — confirm input is clean.
    val dataStream = inputStream
      .map(data => {
        val fields = data.split(",")
        SensorReading(fields(0), fields(1).toLong, fields(2).toDouble)
      })

    // print() is itself a sink (writes records to stdout).
    dataStream.print()
    // writeAsCsv is deprecated; addSink with StreamingFileSink is the
    // recommended replacement.
//    dataStream.writeAsCsv("C:\\wor\\flink-sample\\src\\main\\resources\\out")
    dataStream.addSink(
      StreamingFileSink.forRowFormat(
        new Path("C:\\wor\\flink-sample\\src\\main\\resources\\out1"),
        // Encodes each record via toString, UTF-8 by default.
        new SimpleStringEncoder[SensorReading]()
      ).build()
    )
    env.execute("file sink test")
  }
}
