package com.edata.bigdata.flink;

import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.functions.sink.filesystem.OutputFileConfig;
import org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.DateTimeBucketAssigner;
import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.OnCheckpointRollingPolicy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.flink.connector.file.sink.FileSink;


import java.nio.charset.StandardCharsets;
import java.util.Properties;
import java.time.ZoneId;

/**
 * Factory for Flink {@link FileSink}s that bucket output into one directory
 * per calendar day and roll part files on every checkpoint.
 */
public class StreamSink {
    // NOTE(review): kept public for backward compatibility with any external
    // readers, but conventionally this should be `private static final Logger`.
    public final Logger logger = LoggerFactory.getLogger(this.getClass());

    /** Bucket path pattern: one output directory per calendar day. */
    private static final String BUCKET_DATE_FORMAT = "yyyy-MM-dd";
    /** Time zone used to decide when a daily bucket rolls over. */
    private static final ZoneId BUCKET_ZONE = ZoneId.of("Asia/Shanghai");

    // Fallbacks mirror Flink's own OutputFileConfig defaults so that a missing
    // property no longer passes null into the builder (which throws an NPE).
    private static final String DEFAULT_PART_PREFIX = "part";
    private static final String DEFAULT_PART_SUFFIX = "";

    /** Supported output file formats for this sink factory. */
    public enum FileType {
        PARQUET, CSV
    }

    public StreamSink() {
    }

    /**
     * Creates a row-format sink that writes each element as one UTF-8 text
     * line (element + {@code "\n"}).
     *
     * <p>NOTE(review): despite its name this method does NOT produce Parquet —
     * it emits plain newline-delimited text. The name is kept to avoid breaking
     * existing callers; consider deprecating in favor of {@code createTextSink}.
     *
     * @param path      base output directory for the sink
     * @param sinkProps properties optionally supplying {@code sink.file.prefix}
     *                  and {@code sink.file.suffix} for part-file naming
     * @return a daily-bucketed sink that rolls part files on each checkpoint
     */
    public FileSink<String> createParquetSink(String path, Properties sinkProps) {
        return FileSink.<String>forRowFormat(new Path(path), (element, stream) ->
                        stream.write((element + "\n").getBytes(StandardCharsets.UTF_8)))
                .withBucketAssigner(new DateTimeBucketAssigner<>(BUCKET_DATE_FORMAT, BUCKET_ZONE))
                // Roll on checkpoint: required for exactly-once with row formats.
                .withRollingPolicy(OnCheckpointRollingPolicy.build())
                .withOutputFileConfig(outputFileConfig(sinkProps))
                .build();
    }

    /**
     * Creates a CSV sink for beans of the given type.
     *
     * @param <T>       bean type being serialized
     * @param path      base output directory for the sink
     * @param sinkProps properties optionally supplying {@code sink.file.prefix}
     *                  and {@code sink.file.suffix} for part-file naming
     * @param clazz     bean class used by the encoder to derive CSV columns
     * @return a daily-bucketed CSV sink that rolls part files on each checkpoint
     */
    public <T> FileSink<T> createCSVSink(String path, Properties sinkProps, Class<T> clazz) {
        // EData's encoder turns each bean into one CSV row and writes a header
        // line of column names before the first record.
        CsvSinkEncoder<T> encoder = new CsvSinkEncoder<>(clazz);
        return FileSink.forRowFormat(new Path(path), encoder)
                .withBucketAssigner(new DateTimeBucketAssigner<>(BUCKET_DATE_FORMAT, BUCKET_ZONE))
                .withRollingPolicy(OnCheckpointRollingPolicy.build())
                .withOutputFileConfig(outputFileConfig(sinkProps))
                .build();
    }

    /**
     * Shared part-file naming config for both sinks. Falls back to Flink's
     * defaults when a property key is absent (previously a missing key handed
     * {@code null} to the builder).
     */
    private static OutputFileConfig outputFileConfig(Properties sinkProps) {
        return OutputFileConfig.builder()
                .withPartPrefix(sinkProps.getProperty("sink.file.prefix", DEFAULT_PART_PREFIX))
                .withPartSuffix(sinkProps.getProperty("sink.file.suffix", DEFAULT_PART_SUFFIX))
                .build();
    }
}
