package com.atguigu.sink;

import com.atguigu.func.ClickSource;
import com.atguigu.pojo.Event;
import org.apache.flink.api.common.serialization.SimpleStringEncoder;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.connector.file.sink.FileSink;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.OutputFileConfig;
import org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.DateTimeBucketAssigner;
import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy;

import java.awt.*;
import java.time.Duration;

/**
 * @Author Mr.Zheng
 * @Date 2023/6/14 20:48
 *
 * Demonstrates writing a stream to files with the File connector's {@code FileSink}.
 *
 * New API:  env.sinkTo(Sink)
 * Old API:  env.addSink(SinkFunction)
 *
 * File connector:
 *      1. FileSource
 *      2. FileSink
 */
public class Flink01_FileSink {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Enable checkpointing: FileSink only moves part files from the
        // "pending" state to "finished" on checkpoint completion, so without
        // this the output files would never be finalized.
        env.enableCheckpointing(5000);

        // Unbounded demo source producing Event records.
        DataStreamSource<Event> ds = env.addSource(new ClickSource());

        // Build a row-format FileSink that writes events as strings under "output/".
        FileSink<Event> fileSink =
                FileSink.<Event>forRowFormat(new Path("output"), new SimpleStringEncoder<>())
                .withRollingPolicy(
                        DefaultRollingPolicy.builder()
                                .withMaxPartSize(MemorySize.parse("10m"))      // roll when a part file reaches 10 MB
                                .withRolloverInterval(Duration.ofSeconds(10))  // roll at least every 10 s
                                .withInactivityInterval(Duration.ofSeconds(5)) // roll after 5 s without new data
                                .build()

                )  // rolling policy
                .withBucketCheckInterval(1000L)  // how often (ms) to check the rolling policy
                .withOutputFileConfig(
                        OutputFileConfig.builder()
                                // Fixed typo: was "atgiugu-" — part files should carry the
                                // project name "atguigu" (matches package com.atguigu).
                                .withPartPrefix("atguigu-")
                                .withPartSuffix(".log")
                                .build()
                )
                .withBucketAssigner(
                        // One bucket (subdirectory) per minute.
                        // NOTE(review): the space in the pattern produces bucket directory
                        // names containing a space — confirm this is intended; a safer
                        // pattern would be "yyyy-MM-dd--HH-mm".
                        new DateTimeBucketAssigner<>("yyyy-MM-dd HH-mm")
                )
                .build();

        // New API: attach the sink via sinkTo.
        ds.sinkTo(fileSink);

        env.execute();
    }

}
