package financial.elt;


import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import financial.mock.Config;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.cdc.connectors.mysql.source.MySqlSource;
import org.apache.flink.cdc.connectors.mysql.table.StartupOptions;
import org.apache.flink.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.sink.TopicSelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


/**
 * Streams row-level change events from MySQL (via Flink CDC / Debezium) into Kafka.
 *
 * <p>Each Debezium change event is reshaped into {@code {"data": <after image>, "table": <table>}}
 * and routed to a per-table topic named {@code Config.FIN_MYSQL_CDC_PREFIX + tableName}.
 * Payloads that are not valid JSON, or that carry no table name, go to
 * {@code Config.DEFAULT_KAFKA_TOPIC}.
 */
public class MysqlCDCToKafkaAPI {

    // MySQL connection details; promote to Config if reused elsewhere.
    private static final String MYSQL_HOST = "chdp01";
    private static final int MYSQL_PORT = 3306;
    private static final String MYSQL_DATABASE = "bg2025";

    public static void main(String[] args) throws Exception {

        // NOTE: do not enable checkpointing here. The source below has no watermarks,
        // so checkpoint barriers would block and the job would appear to hang.
        //StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        Configuration configuration = new Configuration();
        configuration.setString("rest.port", "9091");
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(configuration);

        // CDC source: capture changes for every table in the database, starting from
        // the latest binlog position (no initial snapshot of existing rows).
        MySqlSource<String> mysqlCdc = MySqlSource.<String>builder()
                .hostname(MYSQL_HOST)
                .port(MYSQL_PORT)
                .username(Config.MYSQL_USER)
                .password(Config.MYSQL_PASSWORD)
                .databaseList(MYSQL_DATABASE)
                .tableList(MYSQL_DATABASE + ".*")
                .startupOptions(StartupOptions.latest())
                .serverTimeZone("Asia/Shanghai")
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();

        // Reshape each Debezium event into {"data": <after image>, "table": <table name>}.
        SingleOutputStreamOperator<String> stringDataStreamSource = env.fromSource(mysqlCdc, WatermarkStrategy.noWatermarks(), "test")
                .map(new MapFunction<String, String>() {
                    @Override
                    public String map(String value) throws Exception {
                        if (!JSON.isValid(value)) {
                            // Pass non-JSON payloads through untouched; the topic
                            // selector routes them to the default topic.
                            return value;
                        }
                        JSONObject jsonSrc = JSONObject.parseObject(value);
                        JSONObject json = new JSONObject();
                        // "after" is null for delete events; fastjson drops null
                        // values on serialization, so "data" is simply omitted then.
                        json.put("data", jsonSrc.getJSONObject("after"));
                        // Guard against events without a "source" block to avoid an NPE
                        // that would fail the whole job.
                        JSONObject source = jsonSrc.getJSONObject("source");
                        json.put("table", source == null ? null : source.getString("table"));
                        return json.toJSONString();
                    }
                });

        // Debug aid: echo every reshaped record to stdout.
        stringDataStreamSource.print().setParallelism(1);

        KafkaSink<String> kafkaSink = KafkaSink.<String>builder()
                .setBootstrapServers(Config.KAFKA_SERVER)
                .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                        // Route per table; anything without a resolvable table name
                        // (invalid JSON or missing/null "table") goes to the default topic
                        // instead of a bogus "<prefix>null" topic.
                        .setTopicSelector(new TopicSelector<String>() {
                            @Override
                            public String apply(String s) {
                                if (JSON.isValid(s)) {
                                    String tableName = JSONObject.parseObject(s).getString("table");
                                    if (tableName != null) {
                                        return Config.FIN_MYSQL_CDC_PREFIX + tableName;
                                    }
                                }
                                return Config.DEFAULT_KAFKA_TOPIC;
                            }
                        })
                        .setValueSerializationSchema(new SimpleStringSchema())
                        .build())
                // At-least-once: duplicates possible on recovery; downstream consumers
                // should be idempotent. Exactly-once would require checkpointing.
                .setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
                .build();

        stringDataStreamSource.sinkTo(kafkaSink);

        env.execute(Config.FIN_MYSQL_CDC_PREFIX);
    }
}
