package com.arch.flink.debezium;

import com.google.common.collect.Lists;
import io.debezium.embedded.Connect;
import io.debezium.engine.ChangeEvent;
import io.debezium.engine.DebeziumEngine;
import io.debezium.engine.DebeziumEngine.RecordCommitter;
import io.debezium.engine.RecordChangeEvent;
import io.debezium.engine.format.Json;
import org.apache.kafka.connect.source.SourceRecord;

import java.io.IOException;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

/**
 * @author pizhihui
 * @date 2024-07-05 19:45
 */
/**
 * Demo entry point: runs an embedded Debezium engine that streams MySQL binlog
 * changes (Kafka Connect {@code SourceRecord} format) to a {@link DebeziumEngine.ChangeConsumer}.
 *
 * <p>Fixes over the original: the single-thread executor is now shut down and
 * awaited, so the JVM can actually exit once the engine is closed (the worker
 * thread is non-daemon and previously kept the process alive forever).
 */
public class DebeziumDemo {

    public static void main(String[] args) {

        // Define the configuration for the Debezium Engine with MySQL connector...
        final Properties props = new Properties();
        props.setProperty("name", "engine");

        // Connector implementation class.
        props.setProperty("connector.class", io.debezium.connector.mysql.MySqlConnector.class.getCanonicalName());

        // Offset storage: file-backed, flushed periodically.
        props.setProperty("offset.storage", org.apache.kafka.connect.storage.FileOffsetBackingStore.class.getCanonicalName());
        props.setProperty("offset.storage.file.filename", "/tmp/debezium/offsets.dat");
        props.setProperty("offset.flush.interval.ms", "60000");
        props.setProperty("offset.flush.timeout.ms", "5000");

        // Custom offset storage (e.g. Flink state) — disabled for this demo.
        // props.setProperty("offset.storage.flink.state.value", "");

        // Databases to capture.
        // NOTE(review): "database.whitelist" is the legacy property name; newer
        // Debezium releases use "database.include.list" — confirm against the
        // Debezium version on the classpath before upgrading.
        props.setProperty("database.whitelist", String.join(",", Lists.newArrayList("hello")));

        /* begin connector properties */
        props.setProperty("database.hostname", "192.168.31.56");
        props.setProperty("database.port", "3306");
        props.setProperty("database.user", "root");
        props.setProperty("database.password", "123456");

        props.setProperty("database.server.id", "85744");
        props.setProperty("database.server.name", "my-app-connector");

        // Schema history: file-backed (required by the MySQL connector).
        props.setProperty("database.history", io.debezium.relational.history.FileDatabaseHistory.class.getCanonicalName());
        props.setProperty("database.history.file.filename", "/tmp/debezium/storage/dbhistory.dat");

        // Run the engine asynchronously on a dedicated thread; the engine runs
        // until it is closed by the try-with-resources block below.
        ExecutorService executor = Executors.newSingleThreadExecutor();

        // Create the engine with this configuration ...
        try (DebeziumEngine<?> engine = DebeziumEngine.create(Connect.class)
                .using(props)

                // Alternative: print each record directly.
//                .notifying(record -> {
//                    System.out.println(record);
//                })

                // Alternative: consume batches as JSON strings.
                // .notifying(new JsonChangeConsumer())

                .notifying(new KafkaChangeConsumer())

                .build()
        ) {
            executor.execute(engine);

            // Do something else or wait for a signal or an event.
            // Demo caveat: leaving this block closes the engine immediately.
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            // Bug fix: without shutdown() the non-daemon worker thread keeps
            // the JVM alive even after the engine has been closed.
            executor.shutdown();
            try {
                if (!executor.awaitTermination(30, TimeUnit.SECONDS)) {
                    executor.shutdownNow();
                }
            } catch (InterruptedException e) {
                // Re-interrupt per convention and force shutdown.
                Thread.currentThread().interrupt();
                executor.shutdownNow();
            }
        }
        // Engine is stopped when the main code is finished.
    }

}

/**
 * Batch consumer intended to forward raw Kafka Connect {@link SourceRecord}
 * change events to Kafka.
 *
 * <p>Bug fix: the original implementation never acknowledged records, so
 * Debezium never advanced or flushed source offsets — every restart would
 * re-read the whole stream from the last committed position.
 */
class KafkaChangeConsumer implements DebeziumEngine.ChangeConsumer<ChangeEvent<SourceRecord, SourceRecord>> {

    @Override
    public void handleBatch(List<ChangeEvent<SourceRecord, SourceRecord>> records,
                            RecordCommitter<ChangeEvent<SourceRecord, SourceRecord>> committer) throws InterruptedException {
        for (ChangeEvent<SourceRecord, SourceRecord> record : records) {
            // TODO: produce the record to Kafka here, before acknowledging it.
            // Acknowledge each record so its offset becomes eligible for commit.
            committer.markProcessed(record);
        }
        // Tell Debezium the whole batch is done so offsets can be flushed.
        committer.markBatchFinished();
    }
}

/**
 * Batch consumer for JSON-formatted change events; prints each record to stdout.
 *
 * <p>Bug fix: the original printed records but never acknowledged them via the
 * {@link RecordCommitter}, so Debezium never committed source offsets and a
 * restart would replay already-seen events.
 */
class JsonChangeConsumer implements DebeziumEngine.ChangeConsumer<ChangeEvent<String, String>> {

    @Override
    public void handleBatch(List<ChangeEvent<String, String>> records,
                            RecordCommitter<ChangeEvent<String, String>> committer) throws InterruptedException {
        // Print each record, then acknowledge it for offset tracking.
        for (ChangeEvent<String, String> record : records) {
            System.out.println("收到的数据: " + record);
            committer.markProcessed(record);
        }
        // Mark the batch complete so the engine may flush offsets to storage.
        committer.markBatchFinished();
    }
}
