package com.shujia.flink.sink

import java.util.concurrent.TimeUnit

import org.apache.flink.api.common.serialization.SimpleStringEncoder
import org.apache.flink.core.fs.Path
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink
import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}

/**
  * Demo: write a socket text stream to the local filesystem as row-format
  * (one record per line, UTF-8) part files using Flink's StreamingFileSink.
  *
  * Reads lines from `master:8888` and writes them under
  * `flink/data/streaming_sink`, rolling part files per the policy below.
  */
object Demo4StreamingFileSink {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Unbounded source: one String element per line received on the socket.
    val linesDS: DataStream[String] = env.socketTextStream("master", 8888)

    /**
      * Row-format sink: each record is encoded as a UTF-8 line of text.
      * All DefaultRollingPolicy intervals/sizes are expressed in MILLISECONDS / bytes.
      */
    val sink: StreamingFileSink[String] = StreamingFileSink
      .forRowFormat(new Path("flink/data/streaming_sink"), new SimpleStringEncoder[String]("UTF-8"))
      .withRollingPolicy(
        DefaultRollingPolicy.builder() // rolling policy: a part file is rolled when ANY condition below is met
          .withRolloverInterval(TimeUnit.MINUTES.toMillis(15)) // roll once the file contains at least 15 minutes of data
          // BUG FIX: was TimeUnit.MINUTES.toSeconds(5) == 300, i.e. a 300 ms inactivity
          // timeout — the API expects milliseconds, so files rolled almost immediately.
          .withInactivityInterval(TimeUnit.MINUTES.toMillis(5)) // roll after 5 minutes without new records
          .withMaxPartSize(1024 * 1024 * 1024) // roll when the file reaches 1 GB (after the last record is written)
          .build())
      .build()

    linesDS.addSink(sink)

    // Lazy execution: nothing runs until execute() submits the job graph.
    env.execute()
  }

}
