package com.cui.project.config;

import com.alibaba.otter.canal.client.CanalConnector;
import com.alibaba.otter.canal.client.CanalConnectors;
import com.alibaba.otter.canal.protocol.CanalEntry;
import com.alibaba.otter.canal.protocol.Message;
import com.api.common.alert.CommonAlertHelper;
import com.cui.project.utils.DateFormatUtil;
import com.cui.project.utils.ReflectionUtil;
import com.google.gson.Gson;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.UpdateQuery;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.net.InetSocketAddress;
import java.util.*;
import java.util.stream.Collectors;

/**
 * @author: 崔老爷
 * Date: 2024/10/22 11:24
 * @Description: Canal client that tails the MySQL binlog and synchronizes row
 * changes (insert/update/delete) into Elasticsearch. Spring calls
 * {@code afterPropertiesSet()} once the bean is constructed and its
 * dependencies are injected, which is where the consumer thread is started;
 * {@code destroy()} stops it on container shutdown.
 */
@ConfigurationProperties(prefix = "canal")
@Component
@Slf4j
@Setter
public class CanalClient implements InitializingBean, DisposableBean {
    private String host;
    private String port;
    private String username;
    private String password;
    private String destination;
    @Resource
    private ElasticsearchRestTemplate elasticsearchRestTemplate;
    @Resource
    private CommonAlertHelper alertHelper;
//    @Resource
//    private ExecutorService executorService;

    private final static int BATCH_SIZE = 1000;
    private final static String ES_DTO_PACK = "com.cui.project.model.dto";
    private final static Set<Class<?>> CLASSES_ESDTO;
    private final static String SUBSCRIBE_TABLES="yuapi.*";

    private volatile boolean running=false;
    private CanalConnector connector;
    private Thread consumerThread;


    //过滤出被Document标注过的类
    static {
        CLASSES_ESDTO = ReflectionUtil.getClasses(ES_DTO_PACK).stream().filter(
                item -> item.isAnnotationPresent(Document.class)
        ).collect(Collectors.toSet());
    }

    @Override
    public void afterPropertiesSet() throws Exception {
        // 创建链接
        CanalConnector connector = CanalConnectors.newSingleConnector(
                new InetSocketAddress(host, Integer.parseInt(port)),
                destination,
                Optional.ofNullable(username).orElse(""),
                Optional.ofNullable(password).orElse(""));

        running=true;
        Runnable runnable = () -> {
            try {
                int failCount = 0;
                //打开连接
                connector.connect();
                //订阅数据库表,全部表
                connector.subscribe(SUBSCRIBE_TABLES);
                log.info("Canal客户端启动成功");
                while (running) {
                    try {
                        // 获取指定数量的数据 每次获取1000条数据
                        Message message = connector.getWithoutAck(BATCH_SIZE);
                        //获取批量ID
                        long batchId = message.getId();
                        //获取批量的数量
                        int size = message.getEntries().size();
                        //如果没有数据
                        if (batchId == -1 || size == 0) {
                            //线程休眠2秒 防止重复链接数据库
                            Thread.sleep(2000);
                        } else {
                            //如果有数据,处理数据
                            try {
                                insertEsEntry(message.getEntries());
                            } catch (Exception e) {
                                if ((failCount++) <= 3) {
                                    log.error("消费失败，回滚，batchId={} error {}", message.getId(), e);
                                    connector.rollback();
                                } else {
                                    failCount = 0;
                                    log.error("消费失败次数过多，停止消费，停止回滚，batchId={}", message.getId());
                                    alertHelper.canalSyncFailure(SUBSCRIBE_TABLES, "api", "*",
                                            "消费失败次数过多：" + failCount);
                                }
                            }
                        }
                        connector.ack(batchId);
                    }catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        log.info("Canal消费线程被中断");
                        break;
                    }catch (Exception e){
                        alertHelper.canalSyncFailure(SUBSCRIBE_TABLES, "api", "*", e.getMessage());
                        log.error("Canal消费异常", e);
                        // 尝试重连
                        reconnect();
                    }
                }
            } finally {
                cleanup();
            }
        };
        consumerThread = new Thread(runnable,"canal-thread");
//        thread.setDaemon(true);//守护线程在 JVM 退出时会被强制终止，可能导致数据丢失
        consumerThread.start();
    }


    private void insertEsEntry(List<CanalEntry.Entry> entries) {
        for (CanalEntry.Entry entry : entries) {
            if (entry.getEntryType() == CanalEntry.EntryType.TRANSACTIONBEGIN ||
                    entry.getEntryType() == CanalEntry.EntryType.TRANSACTIONEND) {
                //开启/关闭事务的实体类型，跳过
                continue;
            }
            // 解析binlog
            CanalEntry.RowChange rowChange = null;
            try {
                rowChange = CanalEntry.RowChange.parseFrom(entry.getStoreValue());
            } catch (Exception e) {
                log.error("解析出现异常 data: {} {}", entry.toString(), e);
                alertHelper.esIndexFailure(
                        entry.getHeader().getTableName(),
                        entry.getHeader().getLogfileName(),
                        "解析binlog失败：" + e.getMessage()
                );
                throw new RuntimeException("解析出现异常 data:" + entry.toString(), e);
            }
            //获取操作类型：insert/update/delete类型
            CanalEntry.EventType eventType = rowChange.getEventType();
            //打印Header信息
            log.info("binlog[{}:{}] , name[{},{}] , eventType : {}", entry.getHeader().getLogfileName(), entry.getHeader().getLogfileOffset(),
                    entry.getHeader().getSchemaName(), entry.getHeader().getTableName(),
                    eventType);
            //判断是否是DDL语句
            if (rowChange.getIsDdl()) {
                log.info("isDdl: true,sql: {}", rowChange.getSql());
            }
            Class<?> esDtoClass = getEsDtoClass(entry.getHeader().getTableName());
            if (esDtoClass == null) {
                break;
            }
            //获取RowChange对象里的每一行数据，打印出来
            for (CanalEntry.RowData rowData : rowChange.getRowDatasList()) {
                switch (eventType) {
                    case DELETE:
                        handleDelete(rowData.getBeforeColumnsList(),
                                esDtoClass);
                        printColumn(rowData.getBeforeColumnsList());
                        break;
                    case INSERT:
                        log.info("------->; before");
                        handleInsert(rowData.getAfterColumnsList(), esDtoClass
                        );
                        printColumn(rowData.getBeforeColumnsList());
                        break;
                    case UPDATE:
                        //变更后的数据
                        log.info("------->; after");
                        handleUpdate(rowData.getAfterColumnsList(), esDtoClass);
                        printColumn(rowData.getAfterColumnsList());
                        break;
                    default:
                }
            }
        }

    }

    private void handleDelete(List<CanalEntry.Column> columns, Class<?> classz) {
        String id = "";
        for (CanalEntry.Column column : columns) {
            if ("id".equals(column.getName())) {
                id = column.getValue();
                break;
            }
        }
        if (elasticsearchRestTemplate.exists(id, classz)) {
            elasticsearchRestTemplate.delete(id, classz);
            log.info("canal 删除数据 es 中 id={}", id);
        }

    }

    private void handleUpdate(List<CanalEntry.Column> columns, Class<?> classz) {
        Map<String, Object> tmpMap = new HashMap<>();
        String id = "";
        String isDelete = "0";
        for (CanalEntry.Column column : columns) {
            if ("id".equals(column.getName())) {
                id = column.getValue();
            }
            if ("isDelete".equals(column.getName())) {
                isDelete = column.getValue();
            }            // 检查是否是日期字段并格式化日期
            if ("createTime".equals(column.getName()) || "updateTime".equals(column.getName())) {
                String originalDate = column.getValue();
                tmpMap.put(column.getName(), DateFormatUtil.formatDate(originalDate));
            } else {
                tmpMap.put(column.getName(), column.getValue());
            }
        }
        if (elasticsearchRestTemplate.exists(id, classz)) {
            if ("1".equals(isDelete)) {
                elasticsearchRestTemplate.delete(id, classz);
                log.info("canal 删除数据 es 中 id={} isDelete={}", id, isDelete);
                return;
            }
            Gson gson = new Gson();
            String json = gson.toJson(tmpMap);
            UpdateQuery updateQuery = UpdateQuery.builder(id)
                    .withDocument(org.springframework.data.elasticsearch.core.document.Document.create()
                            .fromJson(json))
                    .build();
            IndexCoordinates indexCoordinates = IndexCoordinates.of(classz.getAnnotation(Document.class).indexName());
            elasticsearchRestTemplate.update(updateQuery, indexCoordinates);
            log.info("canal 更新数据数据 es 中 data={}", json);
        }else{
            handleInsert(columns,classz);
        }
    }

    private void handleInsert(List<CanalEntry.Column> columns, Class<?> classz) {
        Map<String, Object> tmpMap = new HashMap<>();
        String id = "";
        for (CanalEntry.Column column : columns) {
            if ("id".equals(column.getName())) {
                id = column.getValue();
            }
            tmpMap.put(column.getName(), column.getValue());
        }
        if (!elasticsearchRestTemplate.exists(id, classz)) {
            Gson gson = new Gson();
            String json = gson.toJson(tmpMap);
            Object esDto = gson.fromJson(json, classz);
            elasticsearchRestTemplate.save(esDto);
            log.info("canal 插入数据到 es 中 {}", esDto);
        }


    }

    private Class<?> getEsDtoClass(String tableName) {
        String preix = "_index";
        for (Class<?> aClass : CLASSES_ESDTO) {
            // 检查类是否标记了 @Document 注解
            if (aClass.isAnnotationPresent(Document.class)) {
                Document annotation = aClass.getAnnotation(Document.class);
                final String indexName = annotation.indexName();
                final String substring = indexName.substring(0, indexName.lastIndexOf(preix));
                if (substring.equals(tableName)) {
                    return aClass;
                }
            }
        }
        return null;
    }

    private static void printColumn(List<CanalEntry.Column> columns) {
        for (CanalEntry.Column column : columns) {
            log.info("{} : {}  update={}", column.getName(), column.getValue(), column.getUpdated());
        }
    }

    private void cleanup(){
        if(connector!=null){
            try {
                connector.disconnect();
                log.info("Canal连接已断开");
            } catch (Exception e) {
                log.error("关闭Canal连接失败", e);
            }
        }
    }

    /**
     * 重新链接
     */
    private void reconnect(){
        int maxRetries=3;
        int retryCount=0;
        long backoffMs = 5000; // 初始退避时间5秒
        while (running&&retryCount<maxRetries){
            try {
                log.info("尝试重连Canal，第{}次", retryCount + 1);
                cleanup();
                // 指数退避：5s, 10s, 20s, 40s, 60s...
                long waitMs = Math.min(backoffMs * (1L << retryCount), 60000L);
                Thread.sleep(waitMs);

                connector.connect();
                connector.subscribe(SUBSCRIBE_TABLES);
                log.info("Canal重连成功");
                return;
            }catch (Exception e){
                retryCount++;
                log.error("Canal重连失败，第{}次", retryCount, e);
                if (retryCount >= maxRetries) {
                    alertHelper.canalConnectionLost(destination, retryCount);
                }
            }
        }

    }

    @Override
    public void destroy() throws Exception {
        running = false;
        if (consumerThread != null && consumerThread.isAlive()) {
            consumerThread.interrupt();
            consumerThread.join(5000); // 等待5秒
        }
        cleanup();
        log.info("Canal客户端已关闭");
    }
}
