package com.atguigu.gmall.realtime.common.util;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.common.bean.TableProcessDwd;
import com.atguigu.gmall.realtime.common.constant.Constant;

import org.apache.doris.flink.cfg.DorisExecutionOptions;
import org.apache.doris.flink.cfg.DorisOptions;
import org.apache.doris.flink.cfg.DorisReadOptions;
import org.apache.doris.flink.deserialization.SimpleListDeserializationSchema;
import org.apache.doris.flink.sink.DorisSink;
import org.apache.doris.flink.sink.writer.serializer.SimpleStringSerializer;
import org.apache.doris.flink.source.DorisSource;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;

import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Properties;

public class FlinkSinkUtil {

    public static DorisSink<String> getDorisSink(String database, String table) {
        /*Properties properties = new Properties();
        // 上游是 json 写入时，需要开启配置
        properties.setProperty("format", "json");
        properties.setProperty("read_json_by_line", "true");


        DorisSink<String> dorisSink = DorisSink.<String>builder()
                .setDorisReadOptions(DorisReadOptions.builder().build())
                .setDorisExecutionOptions(DorisExecutionOptions.builder()
                        // .enable2PC()
                        // .setLabelPrefix("label-doris") //streamload label prefix
                        .setDeletable(false)
                        .setStreamLoadProp(properties)
                        .build())
                .setSerializer(
                        // JsonDebeziumSchemaSerializer.builder().build()
                        new SimpleStringSerializer()
                ) //serialize according to string
                .setDorisOptions(DorisOptions.builder()
                        .setFenodes(Constant.DORIS_FE_NODES)
                        .setTableIdentifier(database + "." + table)
                        .setUsername(Constant.DORIS_USER_NAME)
                        .setPassword(Constant.DORIS_PASSWORD)
                        .build())
                .build();
        return dorisSink;*/
        // 上游是 json 写入时，需要开启配置
        Properties properties = new Properties();
        properties.setProperty("format", "json");
        properties.setProperty("read_json_by_line", "true");

        DorisSink<String> dorisSink = DorisSink.<String>builder()
                .setDorisReadOptions(DorisReadOptions.builder().build())
                .setDorisExecutionOptions(
                        DorisExecutionOptions.builder()
                                //.enable2PC()
                                //setLabelPrefix("label-doris") //streamload label prefix
                                .setDeletable(false)
                                .setStreamLoadProp(properties)
                                .build()
                )
                .setSerializer(
                        new SimpleStringSerializer()
                )
                .setDorisOptions(
                        DorisOptions.builder()
                                .setFenodes(Constant.DORIS_FE_NODES)
                                .setTableIdentifier(database + "." + table)
                                .setUsername(Constant.DORIS_USER_NAME)
                                .setPassword(Constant.DORIS_PASSWORD)
                                /*.setFenodes("hadoop102:7030")
                                .setTableIdentifier("gmall2024_realtime.dws_traffic_vc_ch_ar_is_new_page_view_window")
                                .setUsername("root")
                                .setPassword("aaaaaa")*/
                                .build()
                )
                .build();

        return dorisSink ;
    }

    public static KafkaSink<String> getKafkaSink(String topic) {
        KafkaSink<String> kafkaSink = KafkaSink.<String>builder()
                .setBootstrapServers(Constant.KAFKA_BROKERS)
                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                        .setTopic(topic)
                        .setValueSerializationSchema(new SimpleStringSchema())
                        .build()
                )
                // DeliveryGuarantee.EXACTLY_ONCE | AT_LEAST_ONCE
                .setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
                // 如果是精确一次, 还需要设置 事务id的前缀, 生产者事务超时时间
                // .setTransactionalIdPrefix("gmall-realtime-"+System.currentTimeMillis())
                // 默认值: Kafka Broker最大的事务超市时间: 15min
                //          Kafka Sink最大的事务超时时间:  1hour
                // 原则:  检查点超时时间Checkpoint<=Kafka Sink最大的事务超时时间 <= Kafka Broker最大的事务超市时间
                // .setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, 15 * 1000 * 60 + "" )
                .build();
        return kafkaSink;
    }


    public static KafkaSink<Tuple2<JSONObject, TableProcessDwd>> getKafkaSink() {
        KafkaSink<Tuple2<JSONObject, TableProcessDwd>> kafkaSink = KafkaSink.<Tuple2<JSONObject, TableProcessDwd>>builder()
                .setBootstrapServers(Constant.KAFKA_BROKERS)
                .setRecordSerializer(
                        new KafkaRecordSerializationSchema<Tuple2<JSONObject, TableProcessDwd>>() {
                            @Nullable
                            @Override
                            public ProducerRecord<byte[], byte[]> serialize(Tuple2<JSONObject, TableProcessDwd> element, KafkaSinkContext context, Long timestamp) {
                                //获取 topic
                                String topic = element.f1.getSinkTable();
                                //获取写出的数据
                                byte[] value = element.f0.toJSONString().getBytes();

                                return new ProducerRecord<byte[], byte[]>(topic, value);
                            }
                        }
                )
                //DeliveryGuarantee.EXACTLY_ONCE  |  AT_LEAST_ONCE
                .setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
                //如果是精确一次， 还需要设置 事务id的前缀 、 生产者事务超时时间
                //.setTransactionalIdPrefix("gmall-realtime-" + System.currentTimeMillis())
                // 默认值:  Kafka Broker 最大的事务超时时间:  15min
                //         Kafka Sink 最大的事务超时时间:   1hour
                // 原则:    Checkpoint   <= Kafka Sink  <= Kafka Broker
                //.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, 1000 * 60 * 15 + "")
                .build();

        return kafkaSink;
    }
}
