//package com.shortlink.common.biglog;
//
//import com.alibaba.otter.canal.client.CanalConnector;
//import com.alibaba.otter.canal.client.CanalConnectors;
//import com.alibaba.otter.canal.protocol.CanalEntry;
//import com.alibaba.otter.canal.protocol.Message;
//import com.google.protobuf.InvalidProtocolBufferException;
//import org.springframework.beans.factory.annotation.Value;
//import org.springframework.kafka.core.KafkaTemplate;
//import org.springframework.stereotype.Component;
//
//import javax.annotation.PostConstruct;
//import javax.annotation.PreDestroy;
//import java.net.InetSocketAddress;
//import java.util.List;
//import java.util.Map;
//import java.util.stream.Collectors;
//
///**
// * Canal client that tails the MySQL binlog through a Canal server and relays
// * row-change events to Kafka topic "canal-topic", keyed by table name.
// *
// * NOTE(review): this class is intentionally left commented out (the
// * canal.client dependency is not on the classpath — see the snippet below).
// * Fixes applied in place so it compiles and shuts down cleanly if re-enabled:
// *   - added the missing java.util.Map import (used by buildEvent)
// *   - handle the checked InvalidProtocolBufferException from RowChange.parseFrom
// *   - ack only after a batch was fetched AND processed successfully
// *     (previously ack(message.getId()) ran even when message was null);
// *     rollback on failure so the batch is redelivered instead of lost
// *   - the poll loop is stoppable: @PreDestroy clears a volatile flag and
// *     interrupts the worker before disconnecting, instead of leaving an
// *     infinite while(true) thread running forever
// */
//@Component
//public class CanalClient {
//
//    /** Canal server address in "host:port" form. */
//    @Value("${canal.server}")
//    private String canalServer;
//
//    /** Canal destination (instance) name. */
//    @Value("${canal.destination}")
//    private String canalDestination;
//
////    Required dependency and configuration:
////    <dependency>
////        <groupId>com.alibaba.otter</groupId>
////        <artifactId>canal.client</artifactId>
////        <version>1.1.6</version>
////    </dependency>
////
////    canal:
////        server: 127.0.0.1:11111  # Canal server address
////        destination: example     # Canal destination name
//
//    private CanalConnector connector;
//
//    /** Worker thread polling the Canal server; kept so stop() can interrupt it. */
//    private Thread worker;
//
//    /** Poll-loop guard; cleared by stop(). */
//    private volatile boolean running;
//
//    private final KafkaTemplate<String, String> kafkaTemplate;
//
//    public CanalClient(KafkaTemplate<String, String> kafkaTemplate) {
//        this.kafkaTemplate = kafkaTemplate;
//    }
//
//    @PostConstruct
//    public void start() {
//        // Connect to the Canal server and subscribe to every schema and table.
//        String[] hostPort = canalServer.split(":");
//        connector = CanalConnectors.newSingleConnector(
//                new InetSocketAddress(hostPort[0], Integer.parseInt(hostPort[1])),
//                canalDestination, "", "");
//        connector.connect();
//        connector.subscribe(".*\\..*");
//
//        running = true;
//        worker = new Thread(this::pollLoop, "canal-binlog-poller");
//        worker.setDaemon(true);  // never block JVM shutdown
//        worker.start();
//    }
//
//    /** Fetch/ack loop: pulls binlog batches and forwards ROWDATA entries to Kafka. */
//    private void pollLoop() {
//        while (running) {
//            Message message = connector.getWithoutAck(100);
//            long batchId = message.getId();
//            // batchId == -1 means nothing was fetched — back off, do not busy-spin.
//            if (batchId == -1 || message.getEntries().isEmpty()) {
//                try {
//                    Thread.sleep(1000L);
//                } catch (InterruptedException e) {
//                    Thread.currentThread().interrupt();  // restore interrupt status
//                    break;
//                }
//                continue;
//            }
//            try {
//                for (CanalEntry.Entry entry : message.getEntries()) {
//                    if (entry.getEntryType() == CanalEntry.EntryType.ROWDATA) {
//                        CanalEntry.RowChange rowChange =
//                                CanalEntry.RowChange.parseFrom(entry.getStoreValue());
//                        sendToKafka(buildEvent(entry, rowChange));
//                    }
//                }
//                connector.ack(batchId);       // confirm only after successful processing
//            } catch (InvalidProtocolBufferException e) {
//                connector.rollback(batchId);  // redeliver the batch rather than lose it
//            }
//        }
//    }
//
//    /**
//     * Maps a Canal entry plus its row change into the event DTO sent downstream.
//     * NOTE(review): only after-images are captured; DELETE events will yield
//     * empty row maps (their data lives in getBeforeColumnsList) — confirm intended.
//     */
//    private CanalBinlogEvent buildEvent(CanalEntry.Entry entry, CanalEntry.RowChange rowChange) {
//        CanalBinlogEvent event = new CanalBinlogEvent();
//        event.setDatabase(entry.getHeader().getSchemaName());
//        event.setTable(entry.getHeader().getTableName());
//        event.setType(rowChange.getEventType().toString());
//        event.setEs(entry.getHeader().getExecuteTime());  // binlog execute time
//        event.setTs(System.currentTimeMillis());          // local processing time
//
//        List<Map<String, Object>> data = rowChange.getRowDatasList().stream()
//                .map(rowData -> rowData.getAfterColumnsList().stream()
//                        .collect(Collectors.toMap(
//                                CanalEntry.Column::getName,
//                                CanalEntry.Column::getValue,
//                                (first, second) -> second)))  // tolerate duplicate column names
//                .collect(Collectors.toList());
//        event.setData(data);
//
//        return event;
//    }
//
//    /** Publishes the event to "canal-topic", keyed by table for per-table ordering. */
//    private void sendToKafka(CanalBinlogEvent event) {
//        // NOTE(review): toString() is a fragile wire format — prefer JSON
//        // serialization (e.g. Jackson) if consumers parse this payload.
//        kafkaTemplate.send("canal-topic", event.getTable(), event.toString());
//    }
//
//    @PreDestroy
//    public void stop() {
//        running = false;            // let the poll loop exit
//        if (worker != null) {
//            worker.interrupt();     // wake it if sleeping or blocked
//        }
//        if (connector != null) {
//            connector.disconnect();
//        }
//    }
//}