﻿using Microsoft.Extensions.Options;

namespace OpenAIProxy;

/// <summary>
/// Chooses a (provider, model) pair for an incoming OpenAI-compatible chat request,
/// filtering out providers that cannot serve the request (unsupported streaming,
/// token limit exceeded) and then applying the configured selection strategy.
/// </summary>
public class OpenAIServiceSelector
{
    private readonly ConfigManager _configManager;
    private readonly ILogger<OpenAIServiceSelector> _logger;
    private readonly ProviderSelectionStrategy _strategy;
    // Round-robin cursor; advanced with Interlocked so concurrent requests
    // (this type is typically registered as a singleton) stay safe.
    private int _currentIndex;
    private readonly ITokenCounter _tokenCounter;

    public OpenAIServiceSelector(
        ConfigManager configManager,
        IOptions<ProxySettings> proxySettings,
        ILogger<OpenAIServiceSelector> logger,
        ITokenCounter tokenCounter)
    {
        _configManager = configManager;
        _logger = logger;
        _strategy = proxySettings.Value.ProviderSelectionStrategy;
        // Start at a random offset so multiple proxy instances don't all
        // hammer the first provider. Random.Shared avoids allocating a
        // seeded-by-clock Random per construction.
        _currentIndex = Random.Shared.Next(0, configManager.GetProviders().Count);
        _tokenCounter = tokenCounter;
    }

    /// <summary>
    /// Selects a provider/model pair able to serve <paramref name="request"/>.
    /// </summary>
    /// <param name="request">The incoming chat completion request.</param>
    /// <returns>
    /// The chosen provider and its model configuration, or <c>(null, null)</c>
    /// when no configured provider supports the requested model under the
    /// request's constraints.
    /// </returns>
    public (ProviderConfig? Provider, ModelConfig? Model) SelectProvider(OpenAIChatRequest request)
    {
        var isStreaming = request.Stream ?? false;
        // The token count depends only on the request, not on the provider —
        // compute it once instead of once per provider.
        var tokenSum = request.Messages.Sum(m => _tokenCounter.CalculateTokens(m.Content));

        // Collect every provider that supports the requested model.
        var availableOptions = new List<(ProviderConfig Provider, ModelConfig Model)>();
        foreach (var provider in _configManager.GetProviders())
        {
            if (!provider.Models.TryGetValue(request.Model, out var modelConfig))
            {
                continue;
            }

            // Skip providers that cannot stream when the client asked for streaming.
            if (isStreaming && !modelConfig.StreamSupported)
            {
                _logger.LogWarning(
                    "提供商 {ProviderName} 不支持 {Model} 的流式传输",
                    provider.Name, request.Model);
                continue;
            }

            // Skip providers whose per-model token limit the request exceeds.
            if (tokenSum > modelConfig.MaxTokens)
            {
                _logger.LogWarning(
                    "请求Token数 {TokenSum} 超过提供商 {ProviderName} 的模型 {Model} 限制 {MaxTokens}",
                    tokenSum, provider.Name, request.Model, modelConfig.MaxTokens);
                continue;
            }

            availableOptions.Add((provider, modelConfig));
        }

        if (availableOptions.Count == 0)
        {
            _logger.LogError("没有提供商支持模型: {Model}", request.Model);
            return (null, null);
        }

        // Apply the configured selection strategy.
        return _strategy switch
        {
            ProviderSelectionStrategy.RoundRobin => SelectRoundRobin(availableOptions),
            ProviderSelectionStrategy.LeastCost => SelectLeastCost(availableOptions),
            ProviderSelectionStrategy.BestPerformance => SelectBestPerformance(availableOptions),
            ProviderSelectionStrategy.WeightedRandom => SelectWeightedRandom(availableOptions),
            _ => availableOptions[0]
        };
    }

    // Rotates through the eligible options on successive calls.
    private (ProviderConfig Provider, ModelConfig Model) SelectRoundRobin(
        List<(ProviderConfig, ModelConfig)> options)
    {
        // Interlocked guards concurrent callers; the uint cast ensures the
        // index stays non-negative even after the counter wraps past int.MaxValue.
        var index = (int)((uint)Interlocked.Increment(ref _currentIndex) % (uint)options.Count);
        return options[index];
    }

    // Picks the cheapest eligible model by per-token cost.
    private (ProviderConfig Provider, ModelConfig Model) SelectLeastCost(
        List<(ProviderConfig, ModelConfig)> options)
    {
        // MinBy avoids the O(n log n) sort of OrderBy().First();
        // caller guarantees options is non-empty.
        return options.MinBy(x => x.Item2.CostPerToken);
    }

    // Picks the most expensive eligible model, on the simplifying assumption
    // that higher cost correlates with better performance.
    private (ProviderConfig Provider, ModelConfig Model) SelectBestPerformance(
        List<(ProviderConfig, ModelConfig)> options)
    {
        return options.MaxBy(x => x.Item2.CostPerToken);
    }

    // Picks randomly with probability inversely proportional to cost:
    // cheaper models are chosen more often, but every option can win.
    private (ProviderConfig Provider, ModelConfig Model) SelectWeightedRandom(
        List<(ProviderConfig, ModelConfig)> options)
    {
        // Weight = 1 / cost, clamped away from zero so a free model
        // doesn't divide by zero.
        var totalWeight = options.Sum(o => 1 / (double)Math.Max(0.000001m, o.Item2.CostPerToken));
        var randomValue = Random.Shared.NextDouble() * totalWeight;
        double cumulative = 0;

        foreach (var (provider, model) in options)
        {
            var weight = 1 / (double)Math.Max(0.000001m, model.CostPerToken);
            cumulative += weight;
            if (cumulative >= randomValue)
            {
                return (provider, model);
            }
        }

        // Floating-point rounding can leave cumulative just short of
        // totalWeight; fall back to the last option.
        return options.Last();
    }
}