package com.datareport.config;

import jakarta.annotation.PostConstruct;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

/**
 * Flink计算配置类
 * 管理Flink作业的配置参数，整合Spring Boot配置属性
 */
@Component
@Data
@Slf4j
public class FlinkComputeConfig {

    // Externalized Spring Boot properties; may be null when the bean is created
    // outside a fully-configured Spring context (init() falls back to defaults).
    @Autowired
    private FlinkComputeProperties properties;

    // Values cached from `properties` by init(); when the properties bean is
    // absent, built-in defaults are applied instead.
    private int parallelism;
    private long checkpointInterval;              // ms
    private long checkpointTimeout;               // ms
    private CheckpointingMode checkpointingMode;  // always EXACTLY_ONCE (see init())
    private long minPauseBetweenCheckpoints;      // ms
    private int maxConcurrentCheckpoints;
    private boolean enableExternalizedCheckpoints;
    private String stateBackend;                  // e.g. "rocksdb"
    private String stateBackendPath;              // checkpoint dir URI
    private RestartConfig restart;
    private WindowConfig window;
    private KafkaConfig kafka;
    private DatabaseConfig database;
    private PerformanceConfig performance;

    /**
     * Populates the cached configuration values after dependency injection.
     * Loads from the Spring Boot properties bean when it is present; otherwise
     * falls back to built-in local-development defaults.
     */
    @PostConstruct
    public void init() {
        if (properties == null) {
            // No externalized properties injected — use built-in defaults.
            this.parallelism = 4;
            this.checkpointInterval = 60000;
            this.checkpointTimeout = 300000;
            this.checkpointingMode = CheckpointingMode.EXACTLY_ONCE;
            this.minPauseBetweenCheckpoints = 30000;
            this.maxConcurrentCheckpoints = 1;
            this.enableExternalizedCheckpoints = true;
            this.stateBackend = "rocksdb";
            this.stateBackendPath = "file:///tmp/flink-checkpoints";
            this.restart = new RestartConfig();
            this.window = new WindowConfig();
            this.kafka = new KafkaConfig();
            this.database = new DatabaseConfig();
            this.performance = new PerformanceConfig();
            log.warn("Flink计算配置属性未注入，使用默认配置");
            return;
        }

        // Load everything from the externalized Spring Boot properties.
        this.parallelism = properties.getJob().getParallelism();
        this.checkpointInterval = properties.getCheckpoint().getInterval();
        this.checkpointTimeout = properties.getCheckpoint().getTimeout();
        this.checkpointingMode = CheckpointingMode.EXACTLY_ONCE;
        this.minPauseBetweenCheckpoints = properties.getCheckpoint().getMinPauseBetweenCheckpoints();
        this.maxConcurrentCheckpoints = properties.getCheckpoint().getMaxConcurrentCheckpoints();
        this.enableExternalizedCheckpoints = properties.getCheckpoint().isExternalizedCheckpointsEnabled();
        this.stateBackend = properties.getStateBackend().getType();
        this.stateBackendPath = properties.getStateBackend().getCheckpointDir();
        this.restart = new RestartConfig(properties);
        this.window = new WindowConfig(properties);
        this.kafka = new KafkaConfig(properties);
        this.database = new DatabaseConfig(properties);
        this.performance = new PerformanceConfig(properties);
        log.info("Flink计算配置初始化完成，从Spring Boot配置属性加载");
    }

    /**
     * Restart strategy configuration for the Flink job.
     * Durations are held in seconds.
     */
    @Data
    public static class RestartConfig {
        private String strategy;       // "fixed-delay" | "failure-rate" | "none"
        private int attempts;          // max restart attempts (fixed-delay)
        private long delay;            // delay between restarts, seconds
        private long failureInterval;  // failure-rate measurement window, seconds
        private int maxFailures;       // max failures per interval (failure-rate)

        /** Built-in defaults: fixed-delay, 3 attempts, 10s apart. */
        public RestartConfig() {
            this.strategy = "fixed-delay";
            this.attempts = 3;
            this.delay = 10;
            this.failureInterval = 60;
            this.maxFailures = 3;
        }

        /**
         * Loads restart settings from the externalized properties,
         * converting millisecond values to seconds.
         */
        public RestartConfig(FlinkComputeProperties properties) {
            FlinkComputeProperties.RestartConfig config = properties.getRestart();
            this.strategy = config.getStrategy();
            this.attempts = config.getMaxRestarts();
            this.delay = config.getDelayBetweenRestarts() / 1000; // ms -> s
            this.failureInterval = config.getFailureInterval() / 1000; // ms -> s
            this.maxFailures = config.getMaxFailuresPerInterval();
        }

        /**
         * Maps the textual strategy onto Flink's restart strategy configuration.
         * Null or unrecognized values resolve to no-restart.
         */
        public RestartStrategies.RestartStrategyConfiguration getRestartStrategy() {
            // Fix: guard against a null strategy (possible when loaded from external
            // properties) — previously this threw an NPE in the switch.
            if (strategy == null) {
                return RestartStrategies.noRestart();
            }
            switch (strategy) {
                case "fixed-delay":
                    return RestartStrategies.fixedDelayRestart(attempts, Time.seconds(delay));
                case "failure-rate":
                    return RestartStrategies.failureRateRestart(
                            maxFailures,
                            Time.seconds(failureInterval),
                            Time.seconds(delay)
                    );
                case "none":
                default:
                    return RestartStrategies.noRestart();
            }
        }

        /**
         * Checks that the restart settings are usable:
         * non-blank strategy, non-negative attempts/delay, and for
         * failure-rate a positive interval and failure count.
         */
        public boolean isValid() {
            // Fix: a null/blank strategy previously slipped through validation
            // and then blew up in getRestartStrategy().
            if (strategy == null || strategy.trim().isEmpty()) {
                return false;
            }
            if (attempts < 0) {
                return false;
            }
            if (delay < 0) {
                return false;
            }
            if ("failure-rate".equals(strategy)) {
                if (failureInterval <= 0 || maxFailures <= 0) {
                    return false;
                }
            }
            return true;
        }
    }

    /**
     * Window configuration. All durations are held in seconds.
     */
    @Data
    public static class WindowConfig {
        private long size;             // window size, seconds
        private long allowedLateness;  // accepted lateness, seconds
        private String type;           // window type, e.g. "tumbling" / "session"
        private long slide;            // slide interval, seconds
        private long sessionGap;       // session gap, seconds

        /** Built-in defaults: 5-minute tumbling window, 1-minute lateness. */
        public WindowConfig() {
            this.type = "tumbling";
            this.size = 300;          // 5 minutes
            this.slide = 60;          // 1 minute
            this.sessionGap = 300;    // 5 minutes
            this.allowedLateness = 60; // 1 minute
        }

        /** Loads window settings from externalized properties, converting ms to s. */
        public WindowConfig(FlinkComputeProperties properties) {
            FlinkComputeProperties.WindowConfig source = properties.getWindow();
            this.type = "tumbling"; // the type itself is not externalized; fixed default
            this.size = source.getSize() / 1000;
            this.slide = source.getSlide() / 1000;
            this.sessionGap = source.getSessionTimeout() / 1000;
            this.allowedLateness = source.getMaxLateness() / 1000;
        }

        /**
         * Checks that the window settings are usable: positive size, non-negative
         * slide/lateness, non-blank type, and a positive gap for session windows.
         */
        public boolean isValid() {
            boolean durationsOk = size > 0 && slide >= 0 && allowedLateness >= 0;
            boolean typeOk = type != null && !type.trim().isEmpty();
            if (!durationsOk || !typeOk) {
                return false;
            }
            // Session windows additionally require a positive gap.
            return !"session".equals(type) || sessionGap > 0;
        }
    }

    /**
     * Kafka connectivity configuration plus extra consumer/producer properties.
     */
    @Data
    public static class KafkaConfig {
        private static final String STRING_DESERIALIZER =
                "org.apache.kafka.common.serialization.StringDeserializer";
        private static final String STRING_SERIALIZER =
                "org.apache.kafka.common.serialization.StringSerializer";

        private String bootstrapServers;
        private String consumerGroupId;
        private String startFrom;        // auto.offset.reset value, e.g. "latest"
        private String deserializer;     // value deserializer FQCN
        private String serializer;       // value serializer FQCN
        private Map<String, String> properties; // extra client properties

        /** Built-in defaults targeting a local broker. */
        public KafkaConfig() {
            this.bootstrapServers = "localhost:9092";
            this.consumerGroupId = "flink-compute-group";
            this.startFrom = "latest";
            this.deserializer = STRING_DESERIALIZER;
            this.serializer = STRING_SERIALIZER;
            this.properties = new HashMap<>();
            properties.put("enable.auto.commit", "false");
            properties.put("auto.commit.interval.ms", "1000");
            properties.put("session.timeout.ms", "30000");
            properties.put("key.deserializer", STRING_DESERIALIZER);
            properties.put("value.deserializer", deserializer);
            properties.put("key.serializer", STRING_SERIALIZER);
            properties.put("value.serializer", serializer);
        }

        /** Loads Kafka settings from the externalized properties. */
        public KafkaConfig(FlinkComputeProperties source) {
            FlinkComputeProperties.KafkaConfig kafka = source.getKafka();
            this.bootstrapServers = kafka.getBootstrapServers();
            this.consumerGroupId = kafka.getConsumerGroupId();
            this.startFrom = kafka.getAutoOffsetReset();
            this.deserializer = STRING_DESERIALIZER;
            this.serializer = STRING_SERIALIZER;
            this.properties = new HashMap<>();
            properties.put("enable.auto.commit", String.valueOf(kafka.isEnableAutoCommit()));
            properties.put("session.timeout.ms", String.valueOf(kafka.getSessionTimeoutMs()));
            properties.put("heartbeat.interval.ms", String.valueOf(kafka.getHeartbeatIntervalMs()));
            properties.put("request.timeout.ms", String.valueOf(kafka.getRequestTimeoutMs()));
            properties.put("fetch.max.wait.ms", String.valueOf(kafka.getFetchMaxWaitMs()));
            properties.put("fetch.min.bytes", String.valueOf(kafka.getFetchMinBytes()));
            properties.put("fetch.max.bytes", String.valueOf(kafka.getFetchMaxBytes()));
        }

        // True when the string is neither null nor whitespace-only.
        private static boolean hasText(String value) {
            return value != null && !value.trim().isEmpty();
        }

        /** Checks that all required connection fields are non-blank. */
        public boolean isValid() {
            return hasText(bootstrapServers)
                    && hasText(consumerGroupId)
                    && hasText(startFrom)
                    && hasText(deserializer)
                    && hasText(serializer);
        }

        /**
         * Builds the consumer property map: the extra properties plus
         * connection, group, offset-reset and deserializer entries.
         */
        public Map<String, Object> getConsumerProperties() {
            Map<String, Object> consumer = new HashMap<>(properties);
            consumer.put("bootstrap.servers", bootstrapServers);
            consumer.put("group.id", consumerGroupId);
            consumer.put("auto.offset.reset", startFrom);
            consumer.put("key.deserializer", STRING_DESERIALIZER);
            consumer.put("value.deserializer", deserializer);
            return consumer;
        }

        /** Builds the producer property map (connection + serializers only). */
        public Map<String, Object> getProducerProperties() {
            Map<String, Object> producer = new HashMap<>();
            producer.put("bootstrap.servers", bootstrapServers);
            producer.put("key.serializer", STRING_SERIALIZER);
            producer.put("value.serializer", serializer);
            return producer;
        }
    }

    /**
     * Database (JDBC sink) configuration: connection settings plus batching
     * parameters and the prefix used for result tables.
     */
    @Data
    public static class DatabaseConfig {
        private String url;
        private String username;
        private String password;
        private String driver;
        private int batchSize;          // rows per batch write
        private long batchInterval;     // ms between batch flushes
        private long connectionTimeout; // NOTE(review): unit unclear — 30 here vs 30 (s?) but other durations are ms; confirm
        private String resultTablePrefix;

        // Hard-coded local-development defaults.
        public DatabaseConfig() {
            this.url = "jdbc:mysql://localhost:3306/data_report?useSSL=false&serverTimezone=UTC";
            this.username = "root";
            this.password = "password";
            this.driver = "com.mysql.cj.jdbc.Driver";
            this.batchSize = 1000;
            this.batchInterval = 5000;
            this.connectionTimeout = 30;
            this.resultTablePrefix = "stat_result_";
        }

        public DatabaseConfig(FlinkComputeProperties properties) {
            FlinkComputeProperties.DatabaseConfig config = properties.getDatabase();
            // NOTE(review): url/username/password/driver are hard-coded here even
            // though this constructor receives the externalized properties — any
            // url/credential values in `config` are silently ignored, and the
            // password "123456" lives in source control. These should come from
            // externalized configuration / a secrets store — confirm intent.
            this.url = "jdbc:mysql://localhost:3306/datareport?useUnicode=true&characterEncoding=utf-8&useSSL=false&serverTimezone=GMT%2B8";
            this.username = "root";
            this.password = "123456";
            this.driver = "com.mysql.cj.jdbc.Driver";
            // Batching/timeout/prefix values DO come from the externalized config.
            this.batchSize = config.getBatchSize();
            this.batchInterval = config.getBatchIntervalMs();
            this.connectionTimeout = config.getConnectionTimeout();
            this.resultTablePrefix = config.getResultTablePrefix();
            
            // Log non-sensitive settings only (no credentials).
            log.debug("数据库配置已加载: batchSize={}, batchInterval={}, connectionTimeout={}, resultTablePrefix={}", 
                     batchSize, batchInterval, connectionTimeout, resultTablePrefix);
        }

        /**
         * Builds a property map for a connection pool.
         */
        public Map<String, String> getConnectionPoolProperties() {
            Map<String, String> poolProps = new HashMap<>();
            // NOTE(review): maxConnections is derived from batchSize — these are
            // unrelated concepts and 1000 connections looks unintentional; confirm.
            poolProps.put("maxConnections", String.valueOf(batchSize));
            poolProps.put("connectionTimeout", String.valueOf(connectionTimeout));
            return poolProps;
        }

        /**
         * Checks that the connection settings are usable. Password may be empty
         * but not null; numeric settings must be positive.
         */
        public boolean isValid() {
            if (url == null || url.trim().isEmpty()) {
                return false;
            }
            if (username == null || username.trim().isEmpty()) {
                return false;
            }
            if (password == null) {
                return false;
            }
            if (driver == null || driver.trim().isEmpty()) {
                return false;
            }
            if (batchSize <= 0) {
                return false;
            }
            if (connectionTimeout <= 0) {
                return false;
            }
            return true;
        }
    }

    /**
     * Performance tuning configuration (memory, parallelism, execution mode).
     */
    @Data
    public static class PerformanceConfig {
        private boolean objectReuse;
        private boolean incrementalCheckpoints;
        private boolean localRecovery;
        private int networkBufferSize;        // KB
        private int maxParallelism;
        private int minResources;             // MB
        private int maxResources;             // MB
        private int jvmHeapSize;              // MB
        private double managedMemoryFraction; // 0..1
        private String executionMode;         // "STREAMING" | "BATCH" | "AUTOMATIC"

        /** Built-in defaults. */
        public PerformanceConfig() {
            this.objectReuse = true;
            this.incrementalCheckpoints = true;
            this.localRecovery = true;
            this.networkBufferSize = 32;
            this.maxParallelism = 128;
            this.minResources = 512;
            this.maxResources = 4096;
            this.jvmHeapSize = 1024;
            this.managedMemoryFraction = 0.7;
            this.executionMode = "STREAMING";
        }

        /**
         * Loads performance settings from the externalized properties; values
         * not present in the properties keep built-in defaults.
         */
        public PerformanceConfig(FlinkComputeProperties properties) {
            FlinkComputeProperties.PerformanceConfig config = properties.getPerformance();
            this.objectReuse = config.isObjectReuse();
            this.incrementalCheckpoints = true; // default: enabled
            this.localRecovery = true;          // default: enabled
            this.networkBufferSize = 32;        // default: 32KB
            this.maxParallelism = properties.getJob().getMaxParallelism();
            this.minResources = 512;            // default: 512MB
            this.maxResources = 4096;           // default: 4GB
            this.jvmHeapSize = config.getMemoryPerTaskSlot();
            this.managedMemoryFraction = 0.7;   // default: 70%
            // Fix: executionMode was never initialized by this constructor, leaving
            // it null and guaranteeing an NPE in getRuntimeExecutionMode().
            // Align with the no-arg constructor's default.
            this.executionMode = "STREAMING";
        }

        /**
         * Maps the textual execution mode onto Flink's {@link RuntimeExecutionMode}.
         * Null or unrecognized values resolve to AUTOMATIC.
         */
        public RuntimeExecutionMode getRuntimeExecutionMode() {
            // Null-safe: treat a missing mode like any unrecognized value.
            if (executionMode == null) {
                return RuntimeExecutionMode.AUTOMATIC;
            }
            // Locale.ROOT avoids locale-dependent casing surprises (e.g. Turkish dotless i).
            switch (executionMode.toUpperCase(Locale.ROOT)) {
                case "STREAMING":
                    return RuntimeExecutionMode.STREAMING;
                case "BATCH":
                    return RuntimeExecutionMode.BATCH;
                case "AUTOMATIC":
                default:
                    return RuntimeExecutionMode.AUTOMATIC;
            }
        }

        /**
         * Checks that the tuning parameters are positive, resource bounds are
         * ordered, and the managed-memory fraction lies in [0, 1].
         */
        public boolean isValid() {
            if (networkBufferSize <= 0) {
                return false;
            }
            if (maxParallelism <= 0) {
                return false;
            }
            if (minResources <= 0) {
                return false;
            }
            if (maxResources <= 0 || maxResources < minResources) {
                return false;
            }
            if (jvmHeapSize <= 0) {
                return false;
            }
            if (managedMemoryFraction < 0 || managedMemoryFraction > 1) {
                return false;
            }
            return true;
        }
    }

    /**
     * Builds a mutable key/value snapshot of the effective configuration,
     * suitable for logging or exposing over a management endpoint.
     * Requires init() to have run (nested configs must be non-null).
     */
    public Map<String, Object> getConfigSummary() {
        Map<String, Object> snapshot = new HashMap<>();
        // Job-level settings.
        snapshot.put("parallelism", parallelism);
        snapshot.put("stateBackend", stateBackend);
        // Checkpointing settings.
        snapshot.put("checkpointInterval", checkpointInterval);
        snapshot.put("checkpointTimeout", checkpointTimeout);
        snapshot.put("checkpointingMode", checkpointingMode);
        snapshot.put("enableExternalizedCheckpoints", enableExternalizedCheckpoints);
        snapshot.put("maxConcurrentCheckpoints", maxConcurrentCheckpoints);
        // Highlights from the nested configs.
        snapshot.put("restartStrategy", restart.getStrategy());
        snapshot.put("windowSize", window.getSize());
        snapshot.put("kafkaBootstrapServers", kafka.getBootstrapServers());
        snapshot.put("databaseUrl", database.getUrl());
        return snapshot;
    }

    /**
     * Returns a one-line, human-readable summary of the core settings.
     */
    public String getConfigSummaryString() {
        String template = "FlinkComputeConfig{parallelism=%d, checkpointInterval=%d, checkpointTimeout=%d, "
                + "stateBackend='%s', stateBackendPath='%s', checkpointingMode='%s', "
                + "enableExternalizedCheckpoints=%s, maxConcurrentCheckpoints=%d}";
        return String.format(template, parallelism, checkpointInterval, checkpointTimeout,
                stateBackend, stateBackendPath, checkpointingMode,
                enableExternalizedCheckpoints, maxConcurrentCheckpoints);
    }

    /**
     * Validates the effective configuration, logging the first violation found.
     *
     * @return {@code true} when all top-level and nested settings are usable
     */
    public boolean validateConfig() {
        if (parallelism <= 0) {
            log.error("并行度必须大于0");
            return false;
        }
        if (checkpointInterval <= 0) {
            log.error("检查点间隔必须大于0");
            return false;
        }
        if (checkpointTimeout <= 0) {
            log.error("检查点超时时间必须大于0");
            return false;
        }
        // Previously unvalidated checkpoint settings.
        if (minPauseBetweenCheckpoints < 0) {
            log.error("检查点最小间隔不能小于0");
            return false;
        }
        if (maxConcurrentCheckpoints <= 0) {
            log.error("最大并发检查点数必须大于0");
            return false;
        }
        if (stateBackend == null || stateBackend.trim().isEmpty()) {
            log.error("状态后端类型不能为空");
            return false;
        }
        if (stateBackendPath == null || stateBackendPath.trim().isEmpty()) {
            log.error("状态后端路径不能为空");
            return false;
        }
        // Consistency fix: delegate to the nested configs' isValid() methods,
        // which were defined but never invoked anywhere. Null guards cover the
        // case where validateConfig() is called before init() has run.
        if (restart != null && !restart.isValid()) {
            log.error("重启策略配置无效");
            return false;
        }
        if (window != null && !window.isValid()) {
            log.error("窗口配置无效");
            return false;
        }
        if (kafka != null && !kafka.isValid()) {
            log.error("Kafka配置无效");
            return false;
        }
        if (database != null && !database.isValid()) {
            log.error("数据库配置无效");
            return false;
        }
        if (performance != null && !performance.isValid()) {
            log.error("性能配置无效");
            return false;
        }
        log.info("配置验证通过");
        return true;
    }
}