package com.djf.flinkcdc.service;

import com.djf.flinkcdc.config.FlinkCdcProperties;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import java.io.Serializable;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Canal-style data synchronization service.
 * Provides transparent synchronization of insert/update/delete operations,
 * mimicking Canal's event-listener mechanism.
 *
 * Core features:
 * 1. Event listening: watches MySQL binlog change events
 * 2. Data parsing: extracts the concrete row data of each change
 * 3. Event dispatch: routes events to handlers by operation type
 * 4. Data caching: keeps the before/after row state
 * 5. Callbacks: supports custom listeners for change notifications
 */
@Slf4j
@Service
public class CanalStyleSyncService  implements Serializable {
    private static final long serialVersionUID = 1L;

    @Autowired
    private FlinkCdcProperties properties;

    // Flink environment and CDC source, both built once in init().
    private StreamExecutionEnvironment env;
    private MySqlSource<String> mySqlSource;
    // Guards against double start/stop; cleared by the async task on exit.
    private final AtomicBoolean isRunning = new AtomicBoolean(false);
    private CompletableFuture<Void> syncTask;
    
    // Canal-style event counters.
    // NOTE(review): nothing in this file increments these — CanalEventSink
    // keeps its own private counters — so getCanalStatistics() always
    // reports 0 for them. Confirm whether they should be wired up.
    private final AtomicLong insertCount = new AtomicLong(0);
    private final AtomicLong updateCount = new AtomicLong(0);
    private final AtomicLong deleteCount = new AtomicLong(0);
    
    // Registered data-change listeners; snapshot is handed to the sink at start.
    private final List<DataChangeListener> listeners = new ArrayList<>();
    
    // Table data cache for comparing before/after state.
    // NOTE(review): never written in this file (CanalDataProcessor uses its own
    // cache), so "cachedTables" in the statistics stays 0 — verify intent.
    private final Map<String, Map<String, Object>> tableDataCache = new ConcurrentHashMap<>();

    @PostConstruct
    public void init() {
        // Build the Flink environment, the CDC source and the default
        // listeners eagerly at startup; the job itself starts on demand.
        log.info("🎯 初始化 Canal 风格数据同步服务...");
        setupFlinkEnvironment();
        buildMySqlSource();
        registerDefaultListeners();
        log.info("✅ Canal 风格数据同步服务初始化完成");
    }

    /**
     * Configures the Flink execution environment, tuned for Canal-style sync.
     *
     * Fix: the effective checkpoint interval was computed twice with
     * {@code Math.min}; it is now computed once into a local.
     */
    private void setupFlinkEnvironment() {
        Configuration config = new Configuration();
        env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(config);
        
        // Parallelism 1 preserves binlog event ordering (Canal semantics).
        env.setParallelism(1);
        
        // Aggressive restart strategy: up to 5 attempts, 5 seconds apart.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(5, Time.seconds(5)));
        
        // Checkpoint at most every 30s regardless of the configured interval,
        // to keep recovery close to real time.
        final long checkpointInterval = Math.min(properties.getCheckpoint().getInterval(), 30000);
        
        if (properties.getCheckpoint().getEnabled()) {
            env.enableCheckpointing(checkpointInterval);
            env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
            env.getCheckpointConfig().setMinPauseBetweenCheckpoints(200); // short pause between checkpoints
            env.getCheckpointConfig().setCheckpointTimeout(30000); // 30s timeout
            env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
            // Keep externalized checkpoints on cancellation so the job can be resumed.
            env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
            
            // File-system state backend under a dedicated sub-directory.
            env.setStateBackend(new FsStateBackend(properties.getCheckpoint().getStorage() + "/canal-style"));
        }
        
        log.info("🔧 Canal 风格 Flink 环境配置完成 - 并行度: 1, 快速检查点: {}ms", 
                checkpointInterval);
    }

    /**
     * Builds the MySQL CDC source in real-time listening mode, similar to
     * Canal: start from the latest binlog offset and observe only new changes.
     */
    private void buildMySqlSource() {
        mySqlSource = MySqlSource.<String>builder()
                .hostname(properties.getMysql().getHostname())
                .port(properties.getMysql().getPort())
                .username(properties.getMysql().getUsername())
                .password(properties.getMysql().getPassword())
                .databaseList(properties.getMysql().getDatabaseList())
                .tableList(properties.getMysql().getTableList())
                // Canal style: no initial snapshot, only new binlog events.
                .startupOptions(StartupOptions.latest())
                // Configured serverId avoids clashing with other CDC clients.
                .serverId(properties.getMysql().getServerId())
                .serverTimeZone(properties.getMysql().getServerTimeZone())
                // Emit raw Debezium JSON strings downstream.
                .deserializer(new JsonDebeziumDeserializationSchema())
                // Canal style does not track schema changes.
                .includeSchemaChanges(false)
                .build();
        
        log.info("📡 Canal 风格 MySQL CDC 源构建完成 - 实时监听模式");
    }

    /**
     * Registers the built-in data-change listeners: console output first,
     * then statistics. Registration order is dispatch order.
     */
    private void registerDefaultListeners() {
        for (DataChangeListener defaultListener : Arrays.asList(
                new ConsoleDataChangeListener(),
                new StatisticsDataChangeListener())) {
            addDataChangeListener(defaultListener);
        }
        
        log.info("📋 已注册 {} 个默认数据变更监听器", listeners.size());
    }

    /**
     * Registers a listener that will be notified of every data-change event.
     *
     * @param listener the callback to add; invoked in registration order
     */
    public void addDataChangeListener(DataChangeListener listener) {
        this.listeners.add(listener);
        log.info("➕ 添加数据变更监听器: {}", listener.getClass().getSimpleName());
    }

    /**
     * Starts the Canal-style sync job asynchronously.
     *
     * Fix: the running flag is now claimed atomically BEFORE the async task is
     * scheduled. Previously {@code isRunning} was only set inside the task, so
     * two rapid calls could both pass the guard and launch two jobs.
     */
    public synchronized void startCanalSync() {
        // Atomically claim the running state; bail out if already claimed.
        if (!isRunning.compareAndSet(false, true)) {
            log.warn("⚠️ Canal 风格同步任务已在运行中");
            return;
        }
        
        log.info("🚀 启动 Canal 风格数据同步任务...");
        
        syncTask = CompletableFuture.runAsync(() -> {
            try {
                // Build the CDC stream (parallelism 1 keeps event order).
                DataStreamSource<String> sourceStream = env
                        .fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "Canal Style MySQL CDC Source")
                        .setParallelism(1);
                
                // Parse events, then dispatch them to a snapshot of the
                // currently registered listeners.
                sourceStream
                        .map(new CanalDataProcessor())
                        .name("Canal Data Processor")
                        .addSink(new CanalEventSink(new ArrayList<>(listeners)))
                        .name("Canal Event Sink");
                
                log.info("✅ Canal 风格数据同步任务开始执行");
                // Blocks this pool thread until the Flink job terminates.
                env.execute("Canal Style Flink CDC Sync Job");
                
            } catch (Exception e) {
                log.error("❌ Canal 风格数据同步任务执行失败: {}", e.getMessage(), e);
                throw new RuntimeException(e);
            } finally {
                // Release the flag whether the job finished or failed.
                isRunning.set(false);
                log.info("🔚 Canal 风格数据同步任务结束");
            }
        });
        
        log.info("✅ Canal 风格数据同步任务启动成功");
    }

    /**
     * Stops the Canal-style data sync task.
     *
     * NOTE(review): {@code CompletableFuture.cancel(true)} does NOT interrupt
     * the thread running the task (the mayInterruptIfRunning flag has no
     * effect for CompletableFuture), and the task blocks inside
     * {@code env.execute(...)} — so the underlying Flink job most likely keeps
     * running after this method returns. A real stop would need the JobClient
     * returned by {@code executeAsync()}; confirm intended behavior.
     */
    public synchronized void stopCanalSync() {
        if (!isRunning.get()) {
            log.warn("⚠️ Canal 风格同步任务未在运行");
            return;
        }
        
        log.info("🛑 停止 Canal 风格数据同步任务...");
        
        if (syncTask != null) {
            syncTask.cancel(true);
        }
        
        // Cleared optimistically here; the async task's finally block also
        // clears it when the job actually terminates.
        isRunning.set(false);
        log.info("✅ Canal 风格数据同步任务已停止");
    }

    /**
     * Returns whether the Canal-style sync task is currently running.
     *
     * @return true while the async sync task is active
     */
    public boolean isCanalSyncRunning() {
        return isRunning.get();
    }

    /**
     * Builds a snapshot of the Canal-style runtime state and event counters.
     *
     * @return map with running flag, per-operation counts, total event count,
     *         listener count and cached-table count
     */
    public Map<String, Object> getCanalStatistics() {
        // Read each counter once so the reported total matches the parts.
        long inserts = insertCount.get();
        long updates = updateCount.get();
        long deletes = deleteCount.get();
        
        Map<String, Object> stats = new HashMap<>();
        stats.put("running", isRunning.get());
        stats.put("insertCount", inserts);
        stats.put("updateCount", updates);
        stats.put("deleteCount", deletes);
        stats.put("totalEvents", inserts + updates + deletes);
        stats.put("listenersCount", listeners.size());
        stats.put("cachedTables", tableDataCache.size());
        return stats;
    }

    /**
     * Canal-style data processor.
     * Parses raw Debezium CDC JSON into a {@link CanalDataChangeEvent}.
     * Parse failures never fail the job: they are mapped to an ERROR event
     * carrying the raw payload.
     */
    public static class CanalDataProcessor extends RichMapFunction<String, CanalDataChangeEvent> implements Serializable {
        private static final long serialVersionUID = 1L;
        private static final Logger log = LoggerFactory.getLogger(CanalDataProcessor.class);

        // Last-seen row per "db.table" key.
        // NOTE(review): keyed by table only, not by primary key, so at most one
        // row per table is retained — confirm this is the intended granularity.
        private final Map<String, Map<String, Object>> tableDataCache = new ConcurrentHashMap<>();

        // Jackson mapper is not serializable; rebuilt per task in open().
        private transient ObjectMapper objectMapper;
        
        @Override
        public void open(Configuration parameters) throws Exception {
            super.open(parameters);
            this.objectMapper = new ObjectMapper();
        }
        
        @Override
        public CanalDataChangeEvent map(String jsonData) throws Exception {
            try {
                JsonNode jsonNode = objectMapper.readTree(jsonData);
                
                // Basic envelope fields from the Debezium payload.
                String database = jsonNode.path("source").path("db").asText();
                String table = jsonNode.path("source").path("table").asText();
                String operation = jsonNode.path("op").asText();
                long timestamp = jsonNode.path("source").path("ts_ms").asLong();
                
                // Row images before and after the change (either may be absent).
                Map<String, Object> beforeData = extractData(jsonNode.path("before"));
                Map<String, Object> afterData = extractData(jsonNode.path("after"));
                
                CanalDataChangeEvent event = new CanalDataChangeEvent();
                event.setDatabase(database);
                event.setTable(table);
                event.setOperation(mapOperationType(operation));
                event.setTimestamp(timestamp);
                event.setBeforeData(beforeData);
                event.setAfterData(afterData);
                event.setRawData(jsonData);
                
                updateTableCache(database + "." + table, afterData, operation);
                
                return event;
                
            } catch (Exception e) {
                log.error("❌ Canal 数据处理失败: {}", jsonData, e);
                // Degrade to an ERROR event instead of failing the job.
                CanalDataChangeEvent errorEvent = new CanalDataChangeEvent();
                errorEvent.setOperation(CanalOperationType.ERROR);
                errorEvent.setRawData(jsonData);
                return errorEvent;
            }
        }
        
        /**
         * Flattens a Debezium row node into a column-name -> value map.
         *
         * Fix: SQL NULL columns are now mapped to Java null; previously
         * {@code JsonNode.asText()} turned them into the literal string
         * "null". Missing nodes (no "before"/"after" section) and explicit
         * null nodes yield an empty map.
         */
        private Map<String, Object> extractData(JsonNode dataNode) {
            Map<String, Object> data = new HashMap<>();
            if (dataNode != null && !dataNode.isNull() && !dataNode.isMissingNode()) {
                dataNode.fields().forEachRemaining(entry -> {
                    JsonNode value = entry.getValue();
                    data.put(entry.getKey(), value.isNull() ? null : value.asText());
                });
            }
            return data;
        }
        
        /** Maps Debezium op codes (c/u/d/r) to Canal operation types. */
        private CanalOperationType mapOperationType(String op) {
            switch (op) {
                case "c": return CanalOperationType.INSERT;
                case "u": return CanalOperationType.UPDATE;
                case "d": return CanalOperationType.DELETE;
                case "r": return CanalOperationType.READ;
                default: return CanalOperationType.UNKNOWN;
            }
        }
        
        /** Evicts the cached row on delete, otherwise stores a defensive copy. */
        private void updateTableCache(String tableKey, Map<String, Object> data, String operation) {
            if ("d".equals(operation)) {
                tableDataCache.remove(tableKey);
            } else if (!data.isEmpty()) {
                tableDataCache.put(tableKey, new HashMap<>(data));
            }
        }
    }

    /**
     * Canal event sink.
     * Dispatches each processed event to every registered listener; one
     * listener's failure never prevents the others from being notified.
     */
    public static class CanalEventSink extends RichSinkFunction<CanalDataChangeEvent> implements Serializable {
        private static final long serialVersionUID = 1L;

        private static final Logger log = LoggerFactory.getLogger(CanalEventSink.class);

        // Per-sink-instance counters.
        // NOTE(review): these live inside the (possibly remote) Flink task and
        // are never read back by the service — the outer statistics never see
        // them. Confirm whether they should be Flink metrics/accumulators.
        private final AtomicLong insertCount = new AtomicLong(0);
        private final AtomicLong updateCount = new AtomicLong(0);
        private final AtomicLong deleteCount = new AtomicLong(0);

        // Defensive copy of the listeners captured at job start.
        private final List<DataChangeListener> listeners = new ArrayList<>();
        
        public CanalEventSink(List<DataChangeListener> listeners) {
            this.listeners.addAll(listeners);
        }
        
        @Override
        public void invoke(CanalDataChangeEvent event, Context context) throws Exception {
            updateStatistics(event.getOperation());
            
            // Fan out to all listeners; isolate each listener's failures.
            for (DataChangeListener listener : listeners) {
                try {
                    listener.onDataChange(event);
                } catch (Exception e) {
                    // Fix: pass the exception as the last argument so SLF4J
                    // logs the stack trace (previously only the message).
                    log.error("❌ 监听器 {} 处理事件失败: {}", listener.getClass().getSimpleName(), e.getMessage(), e);
                }
            }
        }
        
        /** Increments the counter matching the operation; others are ignored. */
        private void updateStatistics(CanalOperationType operation) {
            switch (operation) {
                case INSERT:
                    insertCount.incrementAndGet();
                    break;
                case UPDATE:
                    updateCount.incrementAndGet();
                    break;
                case DELETE:
                    deleteCount.incrementAndGet();
                    break;
                default:
                    // READ/UNKNOWN/ERROR are not counted.
                    break;
            }
        }
    }

    /**
     * Canal-style data-change event: one row-level change captured from the
     * binlog, including the before/after row images and the raw payload.
     */
    public static class CanalDataChangeEvent implements Serializable {
        private static final long serialVersionUID = 1L;

        private String database;           // source database name
        private String table;              // source table name
        private CanalOperationType operation; // operation kind (insert/update/...)
        private long timestamp;            // source event timestamp (ms)
        private Map<String, Object> beforeData; // row image before the change
        private Map<String, Object> afterData;  // row image after the change
        private String rawData;            // original CDC JSON payload

        public String getDatabase() {
            return database;
        }

        public void setDatabase(String database) {
            this.database = database;
        }

        public String getTable() {
            return table;
        }

        public void setTable(String table) {
            this.table = table;
        }

        public CanalOperationType getOperation() {
            return operation;
        }

        public void setOperation(CanalOperationType operation) {
            this.operation = operation;
        }

        public long getTimestamp() {
            return timestamp;
        }

        public void setTimestamp(long timestamp) {
            this.timestamp = timestamp;
        }

        public Map<String, Object> getBeforeData() {
            return beforeData;
        }

        public void setBeforeData(Map<String, Object> beforeData) {
            this.beforeData = beforeData;
        }

        public Map<String, Object> getAfterData() {
            return afterData;
        }

        public void setAfterData(Map<String, Object> afterData) {
            this.afterData = afterData;
        }

        public String getRawData() {
            return rawData;
        }

        public void setRawData(String rawData) {
            this.rawData = rawData;
        }

        @Override
        public String toString() {
            return String.format("CanalEvent{db=%s, table=%s, op=%s, timestamp=%d}", 
                    database, table, operation, timestamp);
        }
    }

    /**
     * Canal operation types with human-readable (Chinese) descriptions.
     *
     * Fix: dropped the redundant {@code implements Serializable} — every enum
     * is already serializable via {@code java.lang.Enum}.
     */
    public enum CanalOperationType {
        INSERT("新增"),
        UPDATE("更新"),
        DELETE("删除"),
        READ("读取"),
        UNKNOWN("未知"),
        ERROR("错误");
        
        private final String description;
        
        CanalOperationType(String description) {
            this.description = description;
        }
        
        /** Returns the display description of this operation. */
        public String getDescription() {
            return description;
        }
    }

    /**
     * Callback invoked for every captured data-change event.
     * Extends {@link Serializable} because listener instances are shipped into
     * the Flink sink at job start.
     */
    @FunctionalInterface
    public interface DataChangeListener extends Serializable {
        /** Handles one data-change event; implementations should not block. */
        void onDataChange(CanalDataChangeEvent event);
    }

    /**
     * Listener that prints every change event to the log.
     * Log level and payload depend on the operation type.
     */
    public static class ConsoleDataChangeListener implements DataChangeListener, Serializable {
        private static final long serialVersionUID = 1L;
        private static final Logger log = LoggerFactory.getLogger(ConsoleDataChangeListener.class);
        private static final DateTimeFormatter TS_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
        
        @Override
        public void onDataChange(CanalDataChangeEvent event) {
            String now = LocalDateTime.now().format(TS_FORMAT);
            String summary = String.format(
                "[%s] 🎯 Canal事件 | %s.%s | %s | 时间戳: %d",
                now, event.getDatabase(), event.getTable(), 
                event.getOperation().getDescription(), event.getTimestamp()
            );
            
            CanalOperationType op = event.getOperation();
            switch (op) {
                case INSERT:
                    log.info("➕ {} | 新增数据: {}", summary, event.getAfterData());
                    break;
                case UPDATE:
                    log.info("🔄 {} | 更新前: {} | 更新后: {}", summary, event.getBeforeData(), event.getAfterData());
                    break;
                case DELETE:
                    log.info("🗑️ {} | 删除数据: {}", summary, event.getBeforeData());
                    break;
                case ERROR:
                    log.error("❌ {} | 错误数据: {}", summary, event.getRawData());
                    break;
                default:
                    log.debug("📄 {}", summary);
            }
        }
    }

    /**
     * Listener that keeps per-operation counters and logs a summary line at
     * most once per {@link #LOG_INTERVAL_MS}.
     */
    public static class StatisticsDataChangeListener implements DataChangeListener, Serializable {
        private static final long serialVersionUID = 1L;
        private static final Logger log = LoggerFactory.getLogger(StatisticsDataChangeListener.class);

        // Fix: was a per-instance `final long` named like a constant; it is a
        // true constant, so it is now static final (and not serialized).
        private static final long LOG_INTERVAL_MS = 60000L; // one summary per minute

        // NOTE(review): this value travels through Java serialization into the
        // Flink task, so after a restore the first summary may fire early or
        // late relative to wall clock — confirm that is acceptable.
        private long lastLogTime = System.currentTimeMillis();
        
        // Internal counters, independent of the sink's own statistics.
        private final AtomicLong insertCount = new AtomicLong(0);
        private final AtomicLong updateCount = new AtomicLong(0);
        private final AtomicLong deleteCount = new AtomicLong(0);
        
        @Override
        public void onDataChange(CanalDataChangeEvent event) {
            switch (event.getOperation()) {
                case INSERT:
                    insertCount.incrementAndGet();
                    break;
                case UPDATE:
                    updateCount.incrementAndGet();
                    break;
                case DELETE:
                    deleteCount.incrementAndGet();
                    break;
                default:
                    // READ/UNKNOWN/ERROR events are not counted.
                    break;
            }
            
            // Rate-limited summary log.
            long currentTime = System.currentTimeMillis();
            if (currentTime - lastLogTime >= LOG_INTERVAL_MS) {
                long totalEvents = insertCount.get() + updateCount.get() + deleteCount.get();
                log.info("📊 Canal统计 | 新增: {} | 更新: {} | 删除: {} | 总计: {}",
                        insertCount.get(), updateCount.get(), deleteCount.get(), totalEvents);
                lastLogTime = currentTime;
            }
        }
    }
}

