package com.millstein.realtime.util;

import com.alibaba.fastjson.JSONObject;
import com.millstein.realtime.bean.TableProcess;
import com.millstein.realtime.common.Constants;
import com.millstein.realtime.sink.PhoenixSink;
import org.apache.doris.flink.cfg.DorisExecutionOptions;
import org.apache.doris.flink.cfg.DorisOptions;
import org.apache.doris.flink.cfg.DorisSink;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * @Description
 * @Author tsing
 * @Date 2024-09-27 11:00
 */
/**
 * Factory methods for the Flink {@link SinkFunction}s used by the real-time
 * pipeline: Phoenix (dimension tables), Kafka (exactly-once string records)
 * and Doris (JSON stream load).
 */
public class FlinkSinkUtil {

    // Doris connection settings. NOTE(review): hard-coded here in the original;
    // consider moving to Constants / external configuration alongside KAFKA_SERVER.
    private static final String DORIS_FE_NODES = "hadoop102:7030";
    private static final String DORIS_USERNAME = "root";
    private static final String DORIS_PASSWORD = "123456";

    /** Utility class — not instantiable. */
    private FlinkSinkUtil() {
    }

    /**
     * Builds the Phoenix sink.
     *
     * @return a sink consuming (row JSON, table-process config) pairs
     */
    public static SinkFunction<Tuple2<JSONObject, TableProcess>> getPhoenixSink() {
        return new PhoenixSink();
    }

    /**
     * Builds an exactly-once (transactional) Kafka sink for string records.
     *
     * @param topic target Kafka topic
     * @return a transactional Kafka producer sink
     */
    public static SinkFunction<String> getKafkaSink(String topic) {

        Properties props = new Properties();
        props.setProperty("bootstrap.servers", Constants.KAFKA_SERVER);
        // Must not exceed the broker's transaction.max.timeout.ms (15 min by
        // default) — Flink's own 1-hour default would be rejected by the broker.
        // Use setProperty with a String: Properties is a String->String map and
        // storing an Integer (as before) breaks getProperty/store semantics.
        props.setProperty("transaction.timeout.ms", String.valueOf(15 * 60 * 1000));

        return new FlinkKafkaProducer<>(
                topic,
                new SerializationSchema(topic),
                props,
                FlinkKafkaProducer.Semantic.EXACTLY_ONCE
        );
    }

    /** Serializes string elements as UTF-8 bytes into a fixed topic. */
    private static class SerializationSchema implements KafkaSerializationSchema<String> {

        private final String topic;

        public SerializationSchema(String topic) {
            this.topic = topic;
        }

        @Override
        public ProducerRecord<byte[], byte[]> serialize(String element, Long timestamp) {
            // Explicit charset: a bare element.getBytes() would use the JVM's
            // platform default and corrupt non-ASCII payloads on some hosts.
            return new ProducerRecord<>(topic, element.getBytes(StandardCharsets.UTF_8));
        }
    }

    /**
     * Builds a Doris stream-load sink that writes JSON rows.
     *
     * @param tableName fully qualified Doris table identifier ("db.table")
     * @return a Doris sink flushing every 2 s or 1 MiB, with up to 3 retries
     */
    public static SinkFunction<String> getDorisSink(String tableName) {
        Properties prop = new Properties();
        // Each stream-load batch is sent as a JSON array; strip_outer_array
        // tells Doris to unwrap it into individual rows.
        prop.setProperty("format", "json");
        prop.setProperty("strip_outer_array", "true");

        return DorisSink.sink(
                DorisExecutionOptions.builder()
                        .setBatchIntervalMs(2000L)
                        .setBatchSize(1024 * 1024)
                        .setEnableDelete(false)
                        .setMaxRetries(3)
                        .setStreamLoadProp(prop)
                        .build(),
                DorisOptions.builder()
                        .setFenodes(DORIS_FE_NODES)
                        .setUsername(DORIS_USERNAME)
                        .setPassword(DORIS_PASSWORD)
                        .setTableIdentifier(tableName)
                        .build()
        );
    }
}
