package com.atguigu.api

import org.apache.flink.api.common.serialization.SimpleStringEncoder
import org.apache.flink.core.fs.Path
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink
import org.apache.flink.streaming.api.scala._


/**
 * Demo job: reads sensor records from a local text file, parses each CSV line
 * into a SensorReading, and writes the results to a file sink. A Kafka sink
 * variant is included (commented out) as the intended main example.
 * @time: 2020/6/21 13:10
 * @author: baojinlong
 **/
object KafkaSinkTest {
  def main(args: Array[String]): Unit = {
    // Set up the streaming environment as a single-threaded pipeline so output
    // ordering matches the input file.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // Read raw sensor records (one CSV line per record) from the local file.
    val rawStream: DataStream[String] =
      env.readTextFile("E:/qj_codes/big-data/FlinkTutorial/src/main/resources/sensor.data")

    // Parse each line into a SensorReading and render it back as its string form.
    // Assumes well-formed "id,timestamp,temperature" lines — malformed input will
    // fail the job (acceptable for this demo).
    val readings: DataStream[String] = rawStream.map { line =>
      val fields: Array[String] = line.split(",")
      SensorReading(fields(0), fields(1).toLong, fields(2).toDouble).toString
    }

    // File sink: append each record as a UTF-8 text row.
    val fileSink = StreamingFileSink
      .forRowFormat(
        new Path("E:/qj_codes/big-data/FlinkTutorial/src/main/resources/sensor-output.data"),
        new SimpleStringEncoder[String]("UTF-8"))
      .build()
    readings.addSink(fileSink)

    // Kafka sink variant (the example this object is named after), kept for reference:
    // readings.addSink(new FlinkKafkaProducer011[String]("localhost:9092", "sinkTestTopic", new SimpleStringSchema))

    env.execute("sink simple test job")
  }
}
