package com.bujunjie.study.realtime.common.util;

import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.bujunjie.study.realtime.common.constant.FlinkConstant;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * <p>Utility class for building Flink source objects (Kafka, MySQL CDC).</p>
 *
 * @author bu.junjie
 * @version 1.0.0
 * @createTime 2025/9/12 14:55
 */
public final class FlinkSourceUtil {

    /** Utility class — not instantiable. */
    private FlinkSourceUtil() {
    }

    /**
     * <p>Builds a {@link KafkaSource} for the given topic and consumer group.</p>
     *
     * <p>Consumption starts from the latest offsets. A custom value deserializer is
     * used instead of {@code SimpleStringSchema}, because the latter throws when a
     * record value is {@code null} (e.g. Kafka tombstone messages).</p>
     *
     * @param topic   Kafka topic to subscribe to
     * @param groupId Kafka consumer group id
     * @return a configured {@code KafkaSource<String>}
     * @author bu.junjie
     * @date 2025/9/12 14:57
     */
    public static KafkaSource<String> getKafkaSource(String topic, String groupId) {
        return KafkaSource.<String>builder()
                .setBootstrapServers(FlinkConstant.KAFKA_BROKERS)
                .setTopics(topic)
                .setGroupId(groupId)
                //.setProperty(ConsumerConfig.ISOLATION_LEVEL_CONFIG,"read_committed")
                // In production, exactly-once consumption usually requires maintaining
                // offsets manually: KafkaSource -> KafkaSourceReader -> stored offset state.
                //.setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
                // Start consuming from the latest offsets.
                .setStartingOffsets(OffsetsInitializer.latest())
                // NOTE: Flink's SimpleStringSchema fails on null message values, so a
                // null-tolerant custom deserializer is used here.
                //.setValueOnlyDeserializer(new SimpleStringSchema())
                .setValueOnlyDeserializer(
                        new DeserializationSchema<String>() {
                            @Override
                            public String deserialize(byte[] message) throws IOException {
                                if (ObjectUtil.isNull(message)) {
                                    // Tombstone / null value: emit null instead of failing.
                                    return null;
                                }
                                // Decode explicitly as UTF-8 (Kafka's conventional string
                                // encoding); new String(byte[]) would silently use the
                                // platform-default charset.
                                return new String(message, StandardCharsets.UTF_8);
                            }

                            @Override
                            public boolean isEndOfStream(String nextElement) {
                                // Unbounded stream: never signal end-of-stream.
                                return false;
                            }

                            @Override
                            public TypeInformation<String> getProducedType() {
                                return TypeInformation.of(String.class);
                            }
                        }
                )
                .build();
    }


    /**
     * <p>Builds a MySQL CDC source for the given database and table.</p>
     *
     * <p>Uses {@code StartupOptions.initial()}: first takes a full snapshot of the
     * table, then continues reading changes from the binlog. Each change record is
     * emitted as a Debezium-style JSON string.</p>
     *
     * @param database  database name
     * @param tableName table name (combined with {@code database} as {@code "db.table"})
     * @return a configured {@code MySqlSource<String>}
     * @author bu.junjie
     * @date 2025/9/12 16:57
     */
    public static MySqlSource<String> getMySqlSource(String database, String tableName) {
        Properties props = new Properties();
        // Local/dev-friendly JDBC settings: skip SSL, and allow public key retrieval
        // so the caching_sha2_password auth plugin works without a server certificate.
        props.setProperty("useSSL", "false");
        props.setProperty("allowPublicKeyRetrieval", "true");
        return MySqlSource.<String>builder()
                .hostname(FlinkConstant.MYSQL_HOST)
                .port(FlinkConstant.MYSQL_PORT)
                .databaseList(database)
                .tableList(database + StrUtil.DOT + tableName)
                .username(FlinkConstant.MYSQL_USER_NAME)
                .password(FlinkConstant.MYSQL_PASSWORD)
                .deserializer(new JsonDebeziumDeserializationSchema())
                .startupOptions(StartupOptions.initial())
                .jdbcProperties(props)
                .build();
    }
}
