package com.learn.util;

import com.learn.commmon.Constant;
import com.learn.serialier.SerializationAian;
import org.apache.doris.flink.cfg.DorisExecutionOptions;
import org.apache.doris.flink.cfg.DorisOptions;
import org.apache.doris.flink.cfg.DorisSink;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Properties;

/**
 * Factory methods for Flink sinks used in this project: a Doris streaming sink,
 * a Doris SQL {@code WITH(...)} clause, an exactly-once Kafka producer, and a
 * Kafka SQL {@code WITH(...)} clause.
 *
 * <p>SECURITY NOTE(review): Doris credentials are hard-coded below
 * ("admin"/"Juwan@123"). They should be moved to external configuration
 * (e.g. {@code Constant} or environment variables) and rotated.
 */
public class FlinkSinkUtil {

    /** Utility class — static methods only; not instantiable. */
    private FlinkSinkUtil() {
    }

    /**
     * Builds a Doris streaming sink that stream-loads JSON rows.
     *
     * @param tableName fully qualified Doris table identifier ({@code db.table})
     * @return a {@link SinkFunction} writing JSON strings to Doris
     */
    public static SinkFunction<String> getDorisSink(String tableName) {
        Properties props = new Properties();
        // Stream-load payload format: one JSON array per load, outer array stripped.
        props.setProperty("format", "json");
        props.setProperty("strip_outer_array", "true");
//        props.setProperty("read_json_by_line", "true");
//        props.setProperty("streaming_load_json_max_mb", "4096");

        return DorisSink
                .sink(
                        new DorisExecutionOptions.Builder()
                                .setBatchIntervalMs(30000L)
                                // BUG FIX: the original 1024 * 1024 * 10240 overflows
                                // int arithmetic to -2147483648 (negative batch size).
                                // Use 10 * 1024 * 1024 instead.
                                .setBatchSize(10 * 1024 * 1024)
                                .setEnableDelete(false)
                                .setMaxRetries(3)
                                .setStreamLoadProp(props)
                                .build(),
                        new DorisOptions.Builder()
                                .setFenodes(Constant.DORIS_HOST)
                                // SECURITY: hard-coded credentials — externalize.
                                .setUsername("admin")
                                .setPassword("Juwan@123")
                                .setTableIdentifier(tableName)
                                .build()
                );
    }

    /**
     * Builds the {@code WITH(...)} clause for a Flink SQL Doris connector table.
     *
     * @param tableName table name inside the {@code test_db} database
     * @return the connector options clause
     */
    public static String getDorisSink_SQL(String tableName) {
        // NOTE(review): port 9092 is Kafka's default; Doris FE HTTP is usually
        // 8030 — confirm 'fenodes' endpoint against the actual deployment.
        return "with(" +
                " 'connector'='doris',  " +
                " 'fenodes' = 'cdh02:9092', "  +
                " 'table.identifier' = 'test_db."+ tableName +"', " +
                " 'username' = 'admin',  " +
                " 'password' = 'Juwan@123' " +
                ")";
    }

    /**
     * Builds an exactly-once {@link FlinkKafkaProducer} for the given topic.
     *
     * @param topic default Kafka topic to produce to
     * @return a producer configured with {@code Semantic.EXACTLY_ONCE}
     */
    public static FlinkKafkaProducer getKafkaSink(String topic){

        Properties properties = new Properties();
        // NOTE(review): "defaultTopic" is not a Kafka producer config; the topic
        // is already passed to the FlinkKafkaProducer constructor below.
        properties.setProperty("defaultTopic", topic);
        properties.setProperty("bootstrap.servers", "cdh02:9092");
//        properties.setProperty("compression.type", "lz4"); // try different codecs to compare throughput
        properties.setProperty("compression.type", "zstd");
        properties.setProperty("max.request.size", "52428800");

        // Exactly-once producer settings.
        // NOTE(review): the next two are BROKER-side configs — setting them on
        // producer Properties has no effect; they must be set in server.properties.
        properties.setProperty("transactional.id.expiration.ms", "2073600000");
        // Maximum transaction timeout the broker allows; a client requesting a
        // longer timeout gets InvalidTransactionTimeout from InitPidRequest.
        properties.setProperty("max.transaction.timeout.ms", "900000");

        // Producer-side settings.
        properties.setProperty("enable.idempotence", "true");
        properties.setProperty("acks", "all"); // equivalent to acks = -1
        properties.setProperty("retries", "5");
        // BUG FIX: the key was "max.inflight.requests.per.connection", which
        // Kafka does not recognize (silently ignored). The correct producer
        // config key is "max.in.flight.requests.per.connection".
        properties.setProperty("max.in.flight.requests.per.connection", "1");
        properties.setProperty("transaction.timeout.ms", "600000");

        return new FlinkKafkaProducer(
                topic,
                new SerializationAian(topic),
                properties,
                FlinkKafkaProducer.Semantic.EXACTLY_ONCE
        );
    }

    /**
     * Builds the {@code WITH(...)} clause for a Flink SQL Kafka connector table.
     *
     * @param topic Kafka topic name (also reused as the consumer group id)
     * @return the connector options clause
     */
    public static String getKafkaSink_SQL(String topic) {
        // BUG FIX: the original clause omitted the mandatory 'topic' option and
        // embedded literal spaces inside the group.id value (' topic ').
        return "with(" +
                " 'connector'='kafka',  " +
                " 'topic' = '" + topic + "', " +
                " 'properties.bootstrap.servers' = 'cdh02:9092', "  +
                " 'properties.group.id' = '" + topic + "', " +
                " 'scan.startup.mode' = 'earliest-offset',  " +
                " 'format' = 'json' " +
                ")";
    }

}
