package com.wj.opratorlog.cdc.debezium;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.google.common.collect.Maps;
import io.debezium.config.Configuration;
import io.debezium.embedded.Connect;
import io.debezium.engine.DebeziumEngine;
import io.debezium.engine.RecordChangeEvent;
import io.debezium.engine.format.ChangeEventFormat;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.header.Headers;
import org.apache.kafka.connect.source.SourceRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.expression.Operation;
import org.springframework.stereotype.Component;
import org.springframework.util.ObjectUtils;


import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.*;
import java.util.stream.Collectors;

import static java.util.stream.Collectors.toMap;


/**
 * Embedded Debezium CDC listener.
 *
 * <p>Runs a Debezium embedded engine on a private worker pool and logs the
 * database/table plus before/after row images for every captured insert,
 * update and delete. Snapshot reads ({@code op == "r"}) are ignored.
 *
 * <p>The connector configuration (connector class, offset storage, database
 * connection, {@code table.include.list}, history file, ...) is supplied by
 * an externally defined {@code io.debezium.config.Configuration} bean and
 * injected through the constructor.
 */
@Slf4j
@Component
public class DebeziumListener {

    // Worker pool that runs the blocking Debezium engine. Declared as
    // ExecutorService (not plain Executor) so stop() can shut it down —
    // the original code leaked these threads on context shutdown.
    private final ExecutorService threadPoolExecutor =
            new ThreadPoolExecutor(4, 40, 60, TimeUnit.SECONDS, new LinkedBlockingQueue<>(1000));

    // Embedded engine emitting Kafka Connect SourceRecords.
    private final DebeziumEngine<RecordChangeEvent<SourceRecord>> debeziumEngine;

    /**
     * Builds the embedded engine from the injected connector configuration.
     *
     * @param customerConnectorConfiguration Debezium connector configuration bean
     */
    public DebeziumListener(Configuration customerConnectorConfiguration) {
        this.debeziumEngine = DebeziumEngine.create(ChangeEventFormat.of(Connect.class))
                .using(customerConnectorConfiguration.asProperties())
                .notifying(this::handleChangeEvent)
                .using(new DebeziumEngine.CompletionCallback() {
                    @Override
                    public void handle(boolean success, String message, Throwable throwable) {
                        if (throwable != null) {
                            log.error("Error while processing change event", throwable);
                        }
                        log.info("状态：{}", message);
                    }
                })
                .build();
    }

    /**
     * Callback invoked by the engine for every captured change event.
     *
     * <p>Skips tombstones / records without an {@code op} field and snapshot
     * reads; for {@code c}/{@code u}/{@code d} it logs the affected
     * database/table and the relevant row image(s) as JSON.
     *
     * @param changeEvent wrapper around the Kafka Connect {@link SourceRecord}
     */
    private void handleChangeEvent(RecordChangeEvent<SourceRecord> changeEvent) {
        SourceRecord sourceRecord = changeEvent.record();

        // Tombstone records carry a null value — nothing to log.
        Struct data = (Struct) sourceRecord.value();
        if (data == null) {
            return;
        }

        // Records without an "op" field (e.g. schema-change events) are ignored.
        boolean hasOpField = data.schema().fields().stream()
                .anyMatch(field -> "op".equals(field.name()));
        if (!hasOpField) {
            return;
        }

        String op = (String) data.get("op");
        // "r" = snapshot read during the initial full load, not an incremental change.
        if (Objects.equals("r", op)) {
            return;
        }

        Struct source = data.getStruct("source");
        log.info("=====begin========");
        log.info("操作数据库:{},表名:{}", source.get("db"), source.get("table"));

        switch (op) {
            case "c": { // insert — only an "after" image exists
                Struct after = data.getStruct("after");
                Map<String, Object> addData = buildData(after.schema().fields(), after);
                log.info("新增数据===========:{}",
                        JSON.toJSONString(addData, SerializerFeature.WRITE_MAP_NULL_FEATURES));
                break;
            }
            case "u": { // update — log both row images
                Struct before = data.getStruct("before");
                Map<String, Object> beforeData = buildData(before.schema().fields(), before);
                log.info("修改前数据===========:{}", JSON.toJSONString(beforeData));
                Struct after = data.getStruct("after");
                Map<String, Object> afterData = buildData(after.schema().fields(), after);
                log.info("修改后数据===========:{}", JSON.toJSONString(afterData));
                break;
            }
            case "d": { // delete — only a "before" image exists
                Struct before = data.getStruct("before");
                Map<String, Object> deleteData = buildData(before.schema().fields(), before);
                log.info("删除数据===========:{}", JSON.toJSONString(deleteData));
                break;
            }
            default:
                // Unknown op codes are logged for visibility rather than dropped silently.
                log.warn("Unhandled Debezium op code: {}", op);
                break;
        }
        log.info("=====end========");
    }

    /**
     * Copies the given struct fields into a plain map (field name → value).
     *
     * <p>A {@link LinkedHashMap} keeps the schema's field order and permits
     * null column values. The map is method-local and single-threaded, so the
     * original's {@code Collections.synchronizedMap} wrapper was unnecessary.
     *
     * @param fields schema fields to copy
     * @param struct struct holding the row values
     * @return field-name-to-value map in schema order
     */
    private Map<String, Object> buildData(List<Field> fields, Struct struct) {
        Map<String, Object> rowData = new LinkedHashMap<>(fields.size());
        for (Field field : fields) {
            rowData.put(field.name(), struct.get(field.name()));
        }
        return rowData;
    }

    /** Starts the embedded engine on the worker pool once the bean is constructed. */
    @PostConstruct
    private void start() {
        this.threadPoolExecutor.execute(debeziumEngine);
    }

    /**
     * Stops the engine and shuts down the worker pool on context shutdown.
     * Shutting the executor down fixes a thread leak in the original code.
     *
     * @throws IOException if closing the engine fails
     */
    @PreDestroy
    private void stop() throws IOException {
        if (Objects.nonNull(this.debeziumEngine)) {
            this.debeziumEngine.close();
        }
        this.threadPoolExecutor.shutdown();
        try {
            if (!this.threadPoolExecutor.awaitTermination(10, TimeUnit.SECONDS)) {
                this.threadPoolExecutor.shutdownNow();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            this.threadPoolExecutor.shutdownNow();
        }
    }

}
