package com.datagateway.controller;

import com.datagateway.component.MultiDataSourceManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

import java.util.HashMap;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;

/**
 * Multi data source controller.
 *
 * <p>Exposes REST endpoints for managing multiple Kafka clusters, Hive
 * instances, and the routing rules that map topics/tables onto them.
 * Every endpoint returns a {@code Map} containing a {@code success} flag
 * and a {@code timestamp}, plus either the requested payload or a failure
 * {@code message}. Exceptions are caught at the endpoint boundary and
 * converted into failure responses rather than HTTP error statuses.
 *
 * @author Data Gateway Team
 * @version 1.0.0
 */
@RestController
@RequestMapping("/api/multi-datasource")
public class MultiDataSourceController {

    @Autowired
    private MultiDataSourceManager multiDataSourceManager;

    /**
     * Builds the response skeleton shared by every endpoint.
     *
     * @param success whether the operation succeeded
     * @return a mutable map pre-populated with "success" and "timestamp"
     */
    private static Map<String, Object> newResult(boolean success) {
        Map<String, Object> result = new HashMap<>();
        result.put("success", success);
        result.put("timestamp", System.currentTimeMillis());
        return result;
    }

    /**
     * Builds a failure response carrying the given message.
     *
     * @param message human-readable failure description
     * @return a failure response map with "success"=false, "message" and "timestamp"
     */
    private static Map<String, Object> errorResult(String message) {
        Map<String, Object> result = newResult(false);
        result.put("message", message);
        return result;
    }

    /**
     * Returns aggregated statistics for all registered data sources.
     *
     * @return response map with a "statistics" entry on success
     */
    @GetMapping("/statistics")
    public Map<String, Object> getStatistics() {
        try {
            Map<String, Object> result = newResult(true);
            result.put("statistics", multiDataSourceManager.getStatistics());
            return result;
        } catch (Exception e) {
            return errorResult("获取多数据源统计信息失败: " + e.getMessage());
        }
    }

    /**
     * Registers a Kafka cluster with the multi data source manager.
     *
     * @param request cluster registration request (id, servers, consumer settings)
     * @return response map echoing the cluster id on success
     */
    @PostMapping("/kafka/register")
    public Map<String, Object> registerKafkaCluster(@RequestBody KafkaClusterRegisterRequest request) {
        try {
            MultiDataSourceManager.KafkaClusterConfig config = new MultiDataSourceManager.KafkaClusterConfig(
                request.getClusterId(),
                request.getBootstrapServers(),
                request.getGroupId(),
                request.getAutoOffsetReset(),
                request.getMaxPollRecords(),
                request.getAdditionalProperties()
            );

            multiDataSourceManager.registerKafkaCluster(request.getClusterId(), config);

            Map<String, Object> result = newResult(true);
            result.put("clusterId", request.getClusterId());
            result.put("message", "Kafka集群注册成功");
            return result;
        } catch (Exception e) {
            return errorResult("注册Kafka集群失败: " + e.getMessage());
        }
    }

    /**
     * Registers a Hive instance with the multi data source manager.
     *
     * @param request instance registration request (id, JDBC settings, batching)
     * @return response map echoing the instance id on success
     */
    @PostMapping("/hive/register")
    public Map<String, Object> registerHiveInstance(@RequestBody HiveInstanceRegisterRequest request) {
        try {
            MultiDataSourceManager.HiveInstanceConfig config = new MultiDataSourceManager.HiveInstanceConfig(
                request.getInstanceId(),
                request.getJdbcUrl(),
                request.getUsername(),
                request.getPassword(),
                request.getBatchSize(),
                request.getBatchTimeout(),
                request.getAdditionalProperties()
            );

            multiDataSourceManager.registerHiveInstance(request.getInstanceId(), config);

            Map<String, Object> result = newResult(true);
            result.put("instanceId", request.getInstanceId());
            result.put("message", "Hive实例注册成功");
            return result;
        } catch (Exception e) {
            return errorResult("注册Hive实例失败: " + e.getMessage());
        }
    }

    /**
     * Adds a data source routing rule mapping topics/tables to a Kafka
     * cluster and Hive instance pair.
     *
     * @param request routing rule definition
     * @return response map echoing the rule id on success
     */
    @PostMapping("/route-rule/add")
    public Map<String, Object> addRouteRule(@RequestBody RouteRuleRequest request) {
        try {
            MultiDataSourceManager.DataSourceRouteRule rule = new MultiDataSourceManager.DataSourceRouteRule(
                request.getRuleId(),
                request.getDescription(),
                request.getKafkaClusterId(),
                request.getHiveInstanceId(),
                request.getTopics(),
                request.getTables(),
                request.getConditions()
            );

            multiDataSourceManager.addRouteRule(request.getRuleId(), rule);

            Map<String, Object> result = newResult(true);
            result.put("ruleId", request.getRuleId());
            result.put("message", "数据源路由规则添加成功");
            return result;
        } catch (Exception e) {
            return errorResult("添加数据源路由规则失败: " + e.getMessage());
        }
    }

    /**
     * Lists all registered Kafka clusters.
     *
     * @return response map with "clusters" (id → config) and "clusterCount"
     */
    @GetMapping("/kafka/clusters")
    public Map<String, Object> getAllKafkaClusters() {
        try {
            Map<String, MultiDataSourceManager.KafkaClusterConfig> clusters =
                multiDataSourceManager.getAllKafkaClusters();

            Map<String, Object> result = newResult(true);
            result.put("clusters", clusters);
            result.put("clusterCount", clusters.size());
            return result;
        } catch (Exception e) {
            return errorResult("获取Kafka集群列表失败: " + e.getMessage());
        }
    }

    /**
     * Lists all registered Hive instances.
     *
     * @return response map with "instances" (id → config) and "instanceCount"
     */
    @GetMapping("/hive/instances")
    public Map<String, Object> getAllHiveInstances() {
        try {
            Map<String, MultiDataSourceManager.HiveInstanceConfig> instances =
                multiDataSourceManager.getAllHiveInstances();

            Map<String, Object> result = newResult(true);
            result.put("instances", instances);
            result.put("instanceCount", instances.size());
            return result;
        } catch (Exception e) {
            return errorResult("获取Hive实例列表失败: " + e.getMessage());
        }
    }

    /**
     * Lists all routing rules.
     *
     * @return response map with "rules" (id → rule) and "ruleCount"
     */
    @GetMapping("/route-rules")
    public Map<String, Object> getAllRouteRules() {
        try {
            Map<String, MultiDataSourceManager.DataSourceRouteRule> rules =
                multiDataSourceManager.getAllRouteRules();

            Map<String, Object> result = newResult(true);
            result.put("rules", rules);
            result.put("ruleCount", rules.size());
            return result;
        } catch (Exception e) {
            return errorResult("获取路由规则列表失败: " + e.getMessage());
        }
    }

    /**
     * Sets the default Kafka cluster used when no routing rule matches.
     *
     * @param request carries the id of the cluster to make default
     * @return response map echoing the cluster id on success
     */
    @PostMapping("/kafka/default")
    public Map<String, Object> setDefaultKafkaCluster(@RequestBody DefaultClusterRequest request) {
        try {
            multiDataSourceManager.setDefaultKafkaCluster(request.getClusterId());

            Map<String, Object> result = newResult(true);
            result.put("clusterId", request.getClusterId());
            result.put("message", "默认Kafka集群设置成功");
            return result;
        } catch (Exception e) {
            return errorResult("设置默认Kafka集群失败: " + e.getMessage());
        }
    }

    /**
     * Sets the default Hive instance used when no routing rule matches.
     *
     * @param request carries the id of the instance to make default
     * @return response map echoing the instance id on success
     */
    @PostMapping("/hive/default")
    public Map<String, Object> setDefaultHiveInstance(@RequestBody DefaultInstanceRequest request) {
        try {
            multiDataSourceManager.setDefaultHiveInstance(request.getInstanceId());

            Map<String, Object> result = newResult(true);
            result.put("instanceId", request.getInstanceId());
            result.put("message", "默认Hive实例设置成功");
            return result;
        } catch (Exception e) {
            return errorResult("设置默认Hive实例失败: " + e.getMessage());
        }
    }

    /**
     * Removes a Kafka cluster by id.
     *
     * @param clusterId id of the cluster to remove
     * @return response map echoing the cluster id on success
     */
    @DeleteMapping("/kafka/{clusterId}")
    public Map<String, Object> removeKafkaCluster(@PathVariable String clusterId) {
        try {
            multiDataSourceManager.removeKafkaCluster(clusterId);

            Map<String, Object> result = newResult(true);
            result.put("clusterId", clusterId);
            result.put("message", "Kafka集群移除成功");
            return result;
        } catch (Exception e) {
            return errorResult("移除Kafka集群失败: " + e.getMessage());
        }
    }

    /**
     * Removes a Hive instance by id.
     *
     * @param instanceId id of the instance to remove
     * @return response map echoing the instance id on success
     */
    @DeleteMapping("/hive/{instanceId}")
    public Map<String, Object> removeHiveInstance(@PathVariable String instanceId) {
        try {
            multiDataSourceManager.removeHiveInstance(instanceId);

            Map<String, Object> result = newResult(true);
            result.put("instanceId", instanceId);
            result.put("message", "Hive实例移除成功");
            return result;
        } catch (Exception e) {
            return errorResult("移除Hive实例失败: " + e.getMessage());
        }
    }

    /**
     * Dry-runs the routing logic: reports which Kafka cluster and Hive
     * instance the given topic/table/payload combination would route to,
     * without producing or writing anything.
     *
     * @param request routing test input (topic, table, sample payload)
     * @return response map with the resolved "kafkaCluster" and "hiveInstance"
     */
    @PostMapping("/route/test")
    public Map<String, Object> testRoute(@RequestBody RouteTestRequest request) {
        try {
            String kafkaCluster = multiDataSourceManager.routeToKafkaCluster(request.getTopic(), request.getData());
            String hiveInstance = multiDataSourceManager.routeToHiveInstance(request.getTable(), request.getData());

            Map<String, Object> result = newResult(true);
            result.put("topic", request.getTopic());
            result.put("table", request.getTable());
            result.put("kafkaCluster", kafkaCluster);
            result.put("hiveInstance", hiveInstance);
            return result;
        } catch (Exception e) {
            return errorResult("测试数据源路由失败: " + e.getMessage());
        }
    }

    /**
     * Request body for registering a Kafka cluster.
     */
    public static class KafkaClusterRegisterRequest {
        private String clusterId;
        private String bootstrapServers;
        private String groupId;
        // Consumer defaults applied when the caller omits these fields.
        private String autoOffsetReset = "latest";
        private int maxPollRecords = 500;
        private Map<String, String> additionalProperties;

        public String getClusterId() { return clusterId; }
        public void setClusterId(String clusterId) { this.clusterId = clusterId; }

        public String getBootstrapServers() { return bootstrapServers; }
        public void setBootstrapServers(String bootstrapServers) { this.bootstrapServers = bootstrapServers; }

        public String getGroupId() { return groupId; }
        public void setGroupId(String groupId) { this.groupId = groupId; }

        public String getAutoOffsetReset() { return autoOffsetReset; }
        public void setAutoOffsetReset(String autoOffsetReset) { this.autoOffsetReset = autoOffsetReset; }

        public int getMaxPollRecords() { return maxPollRecords; }
        public void setMaxPollRecords(int maxPollRecords) { this.maxPollRecords = maxPollRecords; }

        public Map<String, String> getAdditionalProperties() { return additionalProperties; }
        public void setAdditionalProperties(Map<String, String> additionalProperties) { this.additionalProperties = additionalProperties; }
    }

    /**
     * Request body for registering a Hive instance.
     */
    public static class HiveInstanceRegisterRequest {
        private String instanceId;
        private String jdbcUrl;
        private String username;
        private String password;
        // Batching defaults applied when the caller omits these fields.
        private int batchSize = 1000;
        private int batchTimeout = 30000;
        private Map<String, String> additionalProperties;

        public String getInstanceId() { return instanceId; }
        public void setInstanceId(String instanceId) { this.instanceId = instanceId; }

        public String getJdbcUrl() { return jdbcUrl; }
        public void setJdbcUrl(String jdbcUrl) { this.jdbcUrl = jdbcUrl; }

        public String getUsername() { return username; }
        public void setUsername(String username) { this.username = username; }

        public String getPassword() { return password; }
        public void setPassword(String password) { this.password = password; }

        public int getBatchSize() { return batchSize; }
        public void setBatchSize(int batchSize) { this.batchSize = batchSize; }

        public int getBatchTimeout() { return batchTimeout; }
        public void setBatchTimeout(int batchTimeout) { this.batchTimeout = batchTimeout; }

        public Map<String, String> getAdditionalProperties() { return additionalProperties; }
        public void setAdditionalProperties(Map<String, String> additionalProperties) { this.additionalProperties = additionalProperties; }
    }

    /**
     * Request body for adding a routing rule.
     */
    public static class RouteRuleRequest {
        private String ruleId;
        private String description;
        private String kafkaClusterId;
        private String hiveInstanceId;
        private List<String> topics;
        private List<String> tables;
        private Map<String, Object> conditions;

        public String getRuleId() { return ruleId; }
        public void setRuleId(String ruleId) { this.ruleId = ruleId; }

        public String getDescription() { return description; }
        public void setDescription(String description) { this.description = description; }

        public String getKafkaClusterId() { return kafkaClusterId; }
        public void setKafkaClusterId(String kafkaClusterId) { this.kafkaClusterId = kafkaClusterId; }

        public String getHiveInstanceId() { return hiveInstanceId; }
        public void setHiveInstanceId(String hiveInstanceId) { this.hiveInstanceId = hiveInstanceId; }

        public List<String> getTopics() { return topics; }
        public void setTopics(List<String> topics) { this.topics = topics; }

        public List<String> getTables() { return tables; }
        public void setTables(List<String> tables) { this.tables = tables; }

        public Map<String, Object> getConditions() { return conditions; }
        public void setConditions(Map<String, Object> conditions) { this.conditions = conditions; }
    }

    /**
     * Request body for choosing the default Kafka cluster.
     */
    public static class DefaultClusterRequest {
        private String clusterId;

        public String getClusterId() { return clusterId; }
        public void setClusterId(String clusterId) { this.clusterId = clusterId; }
    }

    /**
     * Request body for choosing the default Hive instance.
     */
    public static class DefaultInstanceRequest {
        private String instanceId;

        public String getInstanceId() { return instanceId; }
        public void setInstanceId(String instanceId) { this.instanceId = instanceId; }
    }

    /**
     * Request body for the routing dry-run endpoint.
     */
    public static class RouteTestRequest {
        private String topic;
        private String table;
        // Sample payload forwarded to the routing logic; shape is opaque here.
        private Object data;

        public String getTopic() { return topic; }
        public void setTopic(String topic) { this.topic = topic; }

        public String getTable() { return table; }
        public void setTable(String table) { this.table = table; }

        public Object getData() { return data; }
        public void setData(Object data) { this.data = data; }
    }
}
