package com.datagateway.component;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import javax.sql.DataSource;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;

/**
 * Multi data source manager.
 * Manages configuration and routing for multiple Kafka clusters and Hive instances.
 * 
 * @author Data Gateway Team
 * @version 1.0.0
 */
@Component
public class MultiDataSourceManager {

    private static final Logger logger = LoggerFactory.getLogger(MultiDataSourceManager.class);

    // Used to push registration / failure notifications to operators.
    @Autowired
    private AlertManager alertManager;

    // NOTE(review): injected but never referenced in this class — confirm it is
    // needed (e.g. used via reflection/AOP) or remove the dependency.
    @Autowired
    private SystemMonitor systemMonitor;

    /**
     * Registered Kafka cluster configurations, keyed by cluster ID.
     */
    private final ConcurrentHashMap<String, KafkaClusterConfig> kafkaClusters = new ConcurrentHashMap<>();

    /**
     * Registered Hive instance configurations, keyed by instance ID.
     */
    private final ConcurrentHashMap<String, HiveInstanceConfig> hiveInstances = new ConcurrentHashMap<>();

    /**
     * Routing rules, keyed by rule ID.
     */
    private final ConcurrentHashMap<String, DataSourceRouteRule> routeRules = new ConcurrentHashMap<>();

    /**
     * Data source statistics.
     * NOTE(review): the two "connection" counters are incremented by the
     * register* methods, so they count registrations, not live connections.
     */
    private final AtomicLong totalKafkaConnections = new AtomicLong(0);
    private final AtomicLong totalHiveConnections = new AtomicLong(0);
    private final AtomicLong totalRouteRequests = new AtomicLong(0);

    /**
     * Default data source IDs, used when no routing rule matches.
     * Not volatile: written only by the setDefault* methods; a briefly stale
     * read by a routing thread is tolerated here.
     */
    private String defaultKafkaCluster = "default";
    private String defaultHiveInstance = "default";

    /**
     * Registers a Kafka cluster, replacing any existing configuration with the
     * same ID. Never throws: failures are logged and reported via the alert
     * manager instead.
     * 
     * @param clusterId cluster ID (map key)
     * @param config cluster configuration
     */
    public void registerKafkaCluster(String clusterId, KafkaClusterConfig config) {
        try {
            // Bug fix: only count first-time registrations. Re-registering an
            // existing cluster replaces its config but previously inflated the
            // totalKafkaConnections counter on every call.
            KafkaClusterConfig previous = kafkaClusters.put(clusterId, config);
            if (previous == null) {
                totalKafkaConnections.incrementAndGet();
            }
            
            logger.info("Kafka集群注册成功: {} -> {}", clusterId, config.getBootstrapServers());
            
            // Notify operators about the registration.
            alertManager.sendSystemInfoAlert("Kafka集群注册", 
                String.format("集群ID: %s, 服务器: %s", clusterId, config.getBootstrapServers()));
            
        } catch (Exception e) {
            logger.error("注册Kafka集群失败: {}", clusterId, e);
            alertManager.sendSystemErrorAlert("Kafka集群注册失败", e.getMessage());
        }
    }

    /**
     * Registers a Hive instance, replacing any existing configuration with the
     * same ID. Never throws: failures are logged and reported via the alert
     * manager instead.
     * 
     * @param instanceId instance ID (map key)
     * @param config instance configuration
     */
    public void registerHiveInstance(String instanceId, HiveInstanceConfig config) {
        try {
            // Bug fix: only count first-time registrations. Re-registering an
            // existing instance replaces its config but previously inflated the
            // totalHiveConnections counter on every call.
            HiveInstanceConfig previous = hiveInstances.put(instanceId, config);
            if (previous == null) {
                totalHiveConnections.incrementAndGet();
            }
            
            logger.info("Hive实例注册成功: {} -> {}", instanceId, config.getJdbcUrl());
            
            // Notify operators about the registration.
            alertManager.sendSystemInfoAlert("Hive实例注册", 
                String.format("实例ID: %s, JDBC URL: %s", instanceId, config.getJdbcUrl()));
            
        } catch (Exception e) {
            logger.error("注册Hive实例失败: {}", instanceId, e);
            alertManager.sendSystemErrorAlert("Hive实例注册失败", e.getMessage());
        }
    }

    /**
     * Adds a routing rule, replacing any existing rule with the same ID.
     * Invalid (null) arguments are rejected with a warning; the method keeps
     * the original no-throw contract.
     * 
     * @param ruleId rule ID (map key)
     * @param rule routing rule
     */
    public void addRouteRule(String ruleId, DataSourceRouteRule rule) {
        // Validate explicitly. The old broad try/catch existed only to swallow
        // the NullPointerException ConcurrentHashMap throws on null key/value,
        // which hid the caller's mistake.
        if (ruleId == null || rule == null) {
            logger.warn("忽略无效的路由规则: ruleId={}", ruleId);
            return;
        }
        
        routeRules.put(ruleId, rule);
        
        logger.info("数据源路由规则添加成功: {} -> {}", ruleId, rule.getDescription());
    }

    /**
     * Resolves the Kafka cluster that should receive data for the given topic.
     * The first rule that matches AND points at a registered cluster wins;
     * otherwise the default cluster ID is returned. Never throws — any routing
     * failure falls back to the default cluster.
     * 
     * @param topic topic name
     * @param data payload handed to rule matching
     * @return ID of the chosen cluster
     */
    public String routeToKafkaCluster(String topic, Object data) {
        try {
            totalRouteRequests.incrementAndGet();
            
            // Scan the rules; skip rules that do not match or that reference
            // an unregistered cluster.
            for (Map.Entry<String, DataSourceRouteRule> entry : routeRules.entrySet()) {
                DataSourceRouteRule candidate = entry.getValue();
                if (!candidate.matches(topic, data)) {
                    continue;
                }
                String target = candidate.getKafkaClusterId();
                if (kafkaClusters.containsKey(target)) {
                    logger.debug("数据路由到Kafka集群: {} -> {}", topic, target);
                    return target;
                }
            }
            
            // No usable rule: fall back to the default cluster.
            logger.debug("使用默认Kafka集群: {} -> {}", topic, defaultKafkaCluster);
            return defaultKafkaCluster;
            
        } catch (Exception e) {
            logger.error("Kafka集群路由失败: {}", topic, e);
            return defaultKafkaCluster;
        }
    }

    /**
     * Resolves the Hive instance that should receive data for the given table.
     * The first rule that matches AND points at a registered instance wins;
     * otherwise the default instance ID is returned. Never throws — any routing
     * failure falls back to the default instance.
     * 
     * @param table table name
     * @param data payload handed to rule matching
     * @return ID of the chosen instance
     */
    public String routeToHiveInstance(String table, Object data) {
        try {
            totalRouteRequests.incrementAndGet();
            
            // Scan the rules; a matching rule is only usable when the instance
            // it names is actually registered.
            for (DataSourceRouteRule candidate : routeRules.values()) {
                if (!candidate.matches(table, data)) {
                    continue;
                }
                String target = candidate.getHiveInstanceId();
                if (hiveInstances.containsKey(target)) {
                    logger.debug("数据路由到Hive实例: {} -> {}", table, target);
                    return target;
                }
            }
            
            // No usable rule: fall back to the default instance.
            logger.debug("使用默认Hive实例: {} -> {}", table, defaultHiveInstance);
            return defaultHiveInstance;
            
        } catch (Exception e) {
            logger.error("Hive实例路由失败: {}", table, e);
            return defaultHiveInstance;
        }
    }

    /**
     * Looks up the configuration of a registered Kafka cluster.
     * 
     * @param clusterId cluster ID
     * @return the configuration, or {@code null} when no such cluster is registered
     */
    public KafkaClusterConfig getKafkaClusterConfig(String clusterId) {
        return kafkaClusters.get(clusterId);
    }

    /**
     * Looks up the configuration of a registered Hive instance.
     * 
     * @param instanceId instance ID
     * @return the configuration, or {@code null} when no such instance is registered
     */
    public HiveInstanceConfig getHiveInstanceConfig(String instanceId) {
        return hiveInstances.get(instanceId);
    }

    /**
     * Returns a defensive snapshot of all registered Kafka clusters.
     * Mutating the returned map does not affect this manager.
     * 
     * @return copy of the cluster-ID → configuration map
     */
    public Map<String, KafkaClusterConfig> getAllKafkaClusters() {
        return new ConcurrentHashMap<>(kafkaClusters);
    }

    /**
     * Returns a defensive snapshot of all registered Hive instances.
     * Mutating the returned map does not affect this manager.
     * 
     * @return copy of the instance-ID → configuration map
     */
    public Map<String, HiveInstanceConfig> getAllHiveInstances() {
        return new ConcurrentHashMap<>(hiveInstances);
    }

    /**
     * Returns a defensive snapshot of all routing rules.
     * Mutating the returned map does not affect this manager.
     * 
     * @return copy of the rule-ID → rule map
     */
    public Map<String, DataSourceRouteRule> getAllRouteRules() {
        return new ConcurrentHashMap<>(routeRules);
    }

    /**
     * Sets the default Kafka cluster used when no routing rule matches.
     * The cluster must already be registered; otherwise the call is ignored
     * with a warning.
     * 
     * @param clusterId cluster ID
     */
    public void setDefaultKafkaCluster(String clusterId) {
        // Guard clause: refuse IDs that are not registered.
        if (!kafkaClusters.containsKey(clusterId)) {
            logger.warn("Kafka集群不存在，无法设置为默认: {}", clusterId);
            return;
        }
        this.defaultKafkaCluster = clusterId;
        logger.info("默认Kafka集群已设置: {}", clusterId);
    }

    /**
     * Sets the default Hive instance used when no routing rule matches.
     * The instance must already be registered; otherwise the call is ignored
     * with a warning.
     * 
     * @param instanceId instance ID
     */
    public void setDefaultHiveInstance(String instanceId) {
        // Guard clause: refuse IDs that are not registered.
        if (!hiveInstances.containsKey(instanceId)) {
            logger.warn("Hive实例不存在，无法设置为默认: {}", instanceId);
            return;
        }
        this.defaultHiveInstance = instanceId;
        logger.info("默认Hive实例已设置: {}", instanceId);
    }

    /**
     * Removes a Kafka cluster. If the removed cluster was the default, another
     * registered cluster is promoted to default; if none remains, a warning is
     * logged because routing will keep returning the now-stale default ID.
     * 
     * @param clusterId cluster ID
     */
    public void removeKafkaCluster(String clusterId) {
        try {
            KafkaClusterConfig config = kafkaClusters.remove(clusterId);
            if (config != null) {
                logger.info("Kafka集群已移除: {}", clusterId);
                
                if (clusterId.equals(defaultKafkaCluster)) {
                    if (!kafkaClusters.isEmpty()) {
                        // Promote an arbitrary remaining cluster to default.
                        String newDefault = kafkaClusters.keySet().iterator().next();
                        setDefaultKafkaCluster(newDefault);
                    } else {
                        // Bug fix: previously the stale default silently survived
                        // when the last cluster was removed, so route lookups
                        // returned an ID with no registered configuration.
                        logger.warn("默认Kafka集群已被移除且无可用集群: {}", clusterId);
                    }
                }
            }
        } catch (Exception e) {
            logger.error("移除Kafka集群失败: {}", clusterId, e);
        }
    }

    /**
     * Removes a Hive instance. If the removed instance was the default, another
     * registered instance is promoted to default; if none remains, a warning is
     * logged because routing will keep returning the now-stale default ID.
     * 
     * @param instanceId instance ID
     */
    public void removeHiveInstance(String instanceId) {
        try {
            HiveInstanceConfig config = hiveInstances.remove(instanceId);
            if (config != null) {
                logger.info("Hive实例已移除: {}", instanceId);
                
                if (instanceId.equals(defaultHiveInstance)) {
                    if (!hiveInstances.isEmpty()) {
                        // Promote an arbitrary remaining instance to default.
                        String newDefault = hiveInstances.keySet().iterator().next();
                        setDefaultHiveInstance(newDefault);
                    } else {
                        // Bug fix: previously the stale default silently survived
                        // when the last instance was removed, so route lookups
                        // returned an ID with no registered configuration.
                        logger.warn("默认Hive实例已被移除且无可用实例: {}", instanceId);
                    }
                }
            }
        } catch (Exception e) {
            logger.error("移除Hive实例失败: {}", instanceId, e);
        }
    }

    /**
     * Builds an immutable snapshot of the manager's statistics, stamped with
     * the current wall-clock time. Counters are read individually, so the
     * snapshot is not atomic across fields.
     * 
     * @return statistics snapshot
     */
    public MultiDataSourceStatistics getStatistics() {
        long snapshotTime = System.currentTimeMillis();
        return new MultiDataSourceStatistics(
            kafkaClusters.size(), hiveInstances.size(), routeRules.size(),
            totalKafkaConnections.get(), totalHiveConnections.get(), totalRouteRequests.get(),
            defaultKafkaCluster, defaultHiveInstance,
            snapshotTime);
    }

    /**
     * Immutable-ish holder for one Kafka cluster's connection settings.
     * NOTE(review): additionalProperties is stored by reference and exposed
     * directly, so callers can still mutate it after construction — confirm
     * that is acceptable before relying on immutability.
     */
    public static class KafkaClusterConfig {
        private final String clusterId;
        private final String bootstrapServers;
        private final String groupId;
        private final String autoOffsetReset;
        private final int maxPollRecords;
        private final Map<String, String> additionalProperties;

        public KafkaClusterConfig(String clusterId, String bootstrapServers, String groupId,
                                String autoOffsetReset, int maxPollRecords, Map<String, String> additionalProperties) {
            this.clusterId = clusterId;
            this.bootstrapServers = bootstrapServers;
            this.groupId = groupId;
            this.autoOffsetReset = autoOffsetReset;
            this.maxPollRecords = maxPollRecords;
            // Fall back to an empty map so callers never see null.
            if (additionalProperties != null) {
                this.additionalProperties = additionalProperties;
            } else {
                this.additionalProperties = new ConcurrentHashMap<>();
            }
        }

        // Accessors
        public String getClusterId() { return clusterId; }
        public String getBootstrapServers() { return bootstrapServers; }
        public String getGroupId() { return groupId; }
        public String getAutoOffsetReset() { return autoOffsetReset; }
        public int getMaxPollRecords() { return maxPollRecords; }
        public Map<String, String> getAdditionalProperties() { return additionalProperties; }
    }

    /**
     * Immutable-ish holder for one Hive instance's connection settings.
     * NOTE(review): the password is held in plain text and exposed via
     * getPassword(); make sure it is never logged. additionalProperties is
     * stored by reference, so callers can still mutate it after construction.
     */
    public static class HiveInstanceConfig {
        private final String instanceId;
        private final String jdbcUrl;
        private final String username;
        private final String password;
        private final int batchSize;
        private final int batchTimeout;
        private final Map<String, String> additionalProperties;

        public HiveInstanceConfig(String instanceId, String jdbcUrl, String username, String password,
                                int batchSize, int batchTimeout, Map<String, String> additionalProperties) {
            this.instanceId = instanceId;
            this.jdbcUrl = jdbcUrl;
            this.username = username;
            this.password = password;
            this.batchSize = batchSize;
            this.batchTimeout = batchTimeout;
            // Fall back to an empty map so callers never see null.
            if (additionalProperties != null) {
                this.additionalProperties = additionalProperties;
            } else {
                this.additionalProperties = new ConcurrentHashMap<>();
            }
        }

        // Accessors
        public String getInstanceId() { return instanceId; }
        public String getJdbcUrl() { return jdbcUrl; }
        public String getUsername() { return username; }
        public String getPassword() { return password; }
        public int getBatchSize() { return batchSize; }
        public int getBatchTimeout() { return batchTimeout; }
        public Map<String, String> getAdditionalProperties() { return additionalProperties; }
    }

    /**
     * A routing rule. A rule may restrict the topics (Kafka routing) and/or
     * tables (Hive routing) it applies to, and may require the routed data to
     * contain certain condition keys.
     * NOTE(review): getTopics()/getTables()/getConditions() expose the internal
     * mutable collections by reference — confirm callers treat them as read-only.
     */
    public static class DataSourceRouteRule {
        private final String ruleId;
        private final String description;
        private final String kafkaClusterId;
        private final String hiveInstanceId;
        private final List<String> topics;
        private final List<String> tables;
        private final Map<String, Object> conditions;

        public DataSourceRouteRule(String ruleId, String description, String kafkaClusterId, String hiveInstanceId,
                                 List<String> topics, List<String> tables, Map<String, Object> conditions) {
            this.ruleId = ruleId;
            this.description = description;
            this.kafkaClusterId = kafkaClusterId;
            this.hiveInstanceId = hiveInstanceId;
            // Empty collections mean "no restriction of that kind".
            this.topics = topics != null ? topics : new ArrayList<>();
            this.tables = tables != null ? tables : new ArrayList<>();
            this.conditions = conditions != null ? conditions : new ConcurrentHashMap<>();
        }

        /**
         * Checks whether this rule applies to the given topic/table name and data.
         * 
         * @param identifier topic or table name
         * @param data payload; when it is a Map, every condition key must be present
         * @return true when the rule matches
         */
        public boolean matches(String identifier, Object data) {
            // Name filter: the identifier is EITHER a topic or a table name, so
            // it matches when either list contains it.
            // Bug fix: the old code required membership in BOTH lists, so a rule
            // that restricted both topics and tables could never match anything.
            if (!topics.isEmpty() || !tables.isEmpty()) {
                if (!topics.contains(identifier) && !tables.contains(identifier)) {
                    return false;
                }
            }
            
            // Condition filter: every condition key must exist in the data map.
            // Wildcard map avoids the old unchecked (Map<String, Object>) cast;
            // only containsKey is needed.
            // NOTE(review): non-Map data skips this filter entirely (as before) —
            // confirm that is intended for rules that declare conditions.
            if (data instanceof Map) {
                Map<?, ?> dataMap = (Map<?, ?>) data;
                for (String conditionKey : conditions.keySet()) {
                    if (!dataMap.containsKey(conditionKey)) {
                        return false;
                    }
                }
            }
            
            return true;
        }

        // Accessors
        public String getRuleId() { return ruleId; }
        public String getDescription() { return description; }
        public String getKafkaClusterId() { return kafkaClusterId; }
        public String getHiveInstanceId() { return hiveInstanceId; }
        public List<String> getTopics() { return topics; }
        public List<String> getTables() { return tables; }
        public Map<String, Object> getConditions() { return conditions; }
    }

    /**
     * Immutable snapshot of the manager's statistics, as produced by
     * {@code getStatistics()}. The timestamp records when the snapshot was taken
     * (epoch milliseconds).
     */
    public static class MultiDataSourceStatistics {
        private final int kafkaClusterCount;
        private final int hiveInstanceCount;
        private final int routeRuleCount;
        private final long totalKafkaConnections;
        private final long totalHiveConnections;
        private final long totalRouteRequests;
        private final String defaultKafkaCluster;
        private final String defaultHiveInstance;
        private final long timestamp;

        public MultiDataSourceStatistics(int kafkaClusterCount, int hiveInstanceCount, int routeRuleCount,
                                       long totalKafkaConnections, long totalHiveConnections, long totalRouteRequests,
                                       String defaultKafkaCluster, String defaultHiveInstance, long timestamp) {
            this.kafkaClusterCount = kafkaClusterCount;
            this.hiveInstanceCount = hiveInstanceCount;
            this.routeRuleCount = routeRuleCount;
            this.totalKafkaConnections = totalKafkaConnections;
            this.totalHiveConnections = totalHiveConnections;
            this.totalRouteRequests = totalRouteRequests;
            this.defaultKafkaCluster = defaultKafkaCluster;
            this.defaultHiveInstance = defaultHiveInstance;
            this.timestamp = timestamp;
        }

        // Accessors
        public int getKafkaClusterCount() { return kafkaClusterCount; }
        public int getHiveInstanceCount() { return hiveInstanceCount; }
        public int getRouteRuleCount() { return routeRuleCount; }
        public long getTotalKafkaConnections() { return totalKafkaConnections; }
        public long getTotalHiveConnections() { return totalHiveConnections; }
        public long getTotalRouteRequests() { return totalRouteRequests; }
        public String getDefaultKafkaCluster() { return defaultKafkaCluster; }
        public String getDefaultHiveInstance() { return defaultHiveInstance; }
        public long getTimestamp() { return timestamp; }
    }
}
