using System;
using System.Collections.Generic;
using System.Linq;

namespace CKY.AgentPlatform.ModelConfigs
{
    /// <summary>
    /// 模型提供商扩展类
    /// </summary>
    public static class ModelProviderExtensions
    {
        /// <summary>
        /// Gets the default invocation configuration for a model provider.
        /// </summary>
        /// <param name="provider">The model provider.</param>
        /// <returns>
        /// A <see cref="ModelInvocationConfig"/> pre-populated with the provider's default
        /// endpoints, retry policy, capability flags and known model list. Unrecognized
        /// providers receive a minimal native-mode config with no capabilities.
        /// </returns>
        public static ModelInvocationConfig GetDefaultConfig(this ModelProvider provider)
        {
            return provider switch
            {
                // OpenAI is consumed through its OpenAI-compatible API only; the native
                // endpoint is intentionally the same URL so either mode resolves identically.
                ModelProvider.OpenAI => new ModelInvocationConfig
                {
                    ApiMode = ModelApiMode.OpenAICompatible,
                    NativeApiEndpoint = "https://api.openai.com/v1/chat/completions",
                    OpenAICompatibleEndpoint = "https://api.openai.com/v1/chat/completions",
                    EnableFallback = false,
                    MaxRetries = 3,
                    PreferredMode = ModelApiMode.OpenAICompatible,
                    Capabilities = ModelProviderCapabilities.Streaming |
                                  ModelProviderCapabilities.FunctionCalling |
                                  ModelProviderCapabilities.JsonMode |
                                  ModelProviderCapabilities.SystemPrompt |
                                  ModelProviderCapabilities.ToolCalling |
                                  ModelProviderCapabilities.Vision,
                    SupportedModels = new List<string>
                    {
                        "gpt-4", "gpt-4-turbo", "gpt-4o", "gpt-3.5-turbo", "gpt-3.5-turbo-16k"
                    }
                },

                // Anthropic exposes a native messages API only (no OpenAI-compatible endpoint).
                ModelProvider.Anthropic => new ModelInvocationConfig
                {
                    ApiMode = ModelApiMode.Native,
                    NativeApiEndpoint = "https://api.anthropic.com/v1/messages",
                    EnableFallback = false,
                    MaxRetries = 3,
                    PreferredMode = ModelApiMode.Native,
                    Capabilities = ModelProviderCapabilities.Streaming |
                                  ModelProviderCapabilities.FunctionCalling |
                                  ModelProviderCapabilities.SystemPrompt,
                    SupportedModels = new List<string>
                    {
                        "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307"
                    }
                },

                // Google Gemini: endpoint is a prefix — the model name is appended by the
                // caller (note the trailing slash).
                ModelProvider.Google => new ModelInvocationConfig
                {
                    ApiMode = ModelApiMode.Native,
                    NativeApiEndpoint = "https://generativelanguage.googleapis.com/v1beta/models/",
                    EnableFallback = false,
                    MaxRetries = 3,
                    PreferredMode = ModelApiMode.Native,
                    Capabilities = ModelProviderCapabilities.Streaming |
                                  ModelProviderCapabilities.FunctionCalling |
                                  ModelProviderCapabilities.SystemPrompt |
                                  ModelProviderCapabilities.Vision,
                    SupportedModels = new List<string>
                    {
                        "gemini-1.5-pro-latest", "gemini-1.5-flash-latest", "gemini-1.0-pro"
                    }
                },

                // ZhipuAI supports both native and OpenAI-compatible modes, so Auto mode
                // with fallback is enabled; the OpenAI-compatible mode is preferred.
                ModelProvider.ZhipuAI => new ModelInvocationConfig
                {
                    ApiMode = ModelApiMode.Auto,
                    NativeApiEndpoint = "https://open.bigmodel.cn/api/paas/v4/chat/completions",
                    OpenAICompatibleEndpoint = "https://open.bigmodel.cn/api/paas/v4/chat/completions",
                    EnableFallback = true,
                    FallbackTimeoutMs = 10000,
                    MaxRetries = 3,
                    PreferredMode = ModelApiMode.OpenAICompatible,
                    Capabilities = ModelProviderCapabilities.Streaming |
                                  ModelProviderCapabilities.FunctionCalling |
                                  ModelProviderCapabilities.JsonMode |
                                  ModelProviderCapabilities.SystemPrompt |
                                  ModelProviderCapabilities.ToolCalling,
                    SupportedModels = new List<string>
                    {
                        // Fix: "glm-4.5" and "glm-4.5-air" were missing here even though
                        // CreateGLM45Config and GetAllSupportedModels already list them.
                        "glm-4", "glm-4-air", "glm-4-airx", "glm-4-flash", "glm-3-turbo",
                        "glm-4.5-airx", "glm-4.5", "glm-4.5-air"
                    }
                },

                // Baidu ERNIE: endpoint is a prefix — the model path segment is appended
                // by the caller (note the trailing slash).
                ModelProvider.Baidu => new ModelInvocationConfig
                {
                    ApiMode = ModelApiMode.Native,
                    NativeApiEndpoint = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/",
                    EnableFallback = false,
                    MaxRetries = 3,
                    PreferredMode = ModelApiMode.Native,
                    Capabilities = ModelProviderCapabilities.Streaming |
                                  ModelProviderCapabilities.FunctionCalling,
                    SupportedModels = new List<string>
                    {
                        "ernie-4.0", "ernie-3.5", "ernie-speed", "ernie-tiny"
                    }
                },

                ModelProvider.Alibaba => new ModelInvocationConfig
                {
                    ApiMode = ModelApiMode.Native,
                    NativeApiEndpoint = "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation",
                    EnableFallback = false,
                    MaxRetries = 3,
                    PreferredMode = ModelApiMode.Native,
                    Capabilities = ModelProviderCapabilities.Streaming,
                    SupportedModels = new List<string>
                    {
                        "qwen-turbo", "qwen-plus", "qwen-max", "qwen-max-longcontext"
                    }
                },

                ModelProvider.Moonshot => new ModelInvocationConfig
                {
                    ApiMode = ModelApiMode.OpenAICompatible,
                    NativeApiEndpoint = "https://api.moonshot.cn/v1/chat/completions",
                    OpenAICompatibleEndpoint = "https://api.moonshot.cn/v1/chat/completions",
                    EnableFallback = false,
                    MaxRetries = 3,
                    PreferredMode = ModelApiMode.OpenAICompatible,
                    Capabilities = ModelProviderCapabilities.Streaming |
                                  ModelProviderCapabilities.FunctionCalling |
                                  ModelProviderCapabilities.JsonMode,
                    SupportedModels = new List<string>
                    {
                        "moonshot-v1-8k", "moonshot-v1-32k", "moonshot-v1-128k"
                    }
                },

                // Custom providers have no predefined endpoint or model catalog; the
                // caller is expected to fill those in.
                ModelProvider.Custom => new ModelInvocationConfig
                {
                    ApiMode = ModelApiMode.OpenAICompatible,
                    EnableFallback = false,
                    MaxRetries = 3,
                    PreferredMode = ModelApiMode.OpenAICompatible,
                    Capabilities = ModelProviderCapabilities.Streaming |
                                  ModelProviderCapabilities.FunctionCalling |
                                  ModelProviderCapabilities.JsonMode,
                    SupportedModels = new List<string>()
                },

                // Safe fallback for any enum value added later: no endpoint, no capabilities.
                _ => new ModelInvocationConfig
                {
                    ApiMode = ModelApiMode.Native,
                    EnableFallback = false,
                    MaxRetries = 3,
                    Capabilities = ModelProviderCapabilities.None,
                    SupportedModels = new List<string>()
                }
            };
        }

        /// <summary>
        /// Gets the human-readable display name of a model provider.
        /// </summary>
        /// <param name="provider">The model provider.</param>
        /// <returns>A display name; falls back to the enum member name for unknown values.</returns>
        public static string GetDisplayName(this ModelProvider provider) => provider switch
        {
            ModelProvider.OpenAI => "OpenAI",
            ModelProvider.Anthropic => "Anthropic Claude",
            ModelProvider.Google => "Google Gemini",
            ModelProvider.ZhipuAI => "智谱AI GLM",
            ModelProvider.Baidu => "百度文心一言",
            ModelProvider.Alibaba => "阿里云通义千问",
            ModelProvider.Moonshot => "月之暗面 Kimi",
            ModelProvider.Custom => "自定义模型",
            _ => provider.ToString()
        };

        /// <summary>
        /// Gets a short description of a model provider.
        /// </summary>
        /// <param name="provider">The model provider.</param>
        /// <returns>A descriptive sentence; a generic "unknown provider" text for unrecognized values.</returns>
        public static string GetDescription(this ModelProvider provider) => provider switch
        {
            ModelProvider.OpenAI => "OpenAI GPT系列模型，支持多种功能包括工具调用、多模态等",
            ModelProvider.Anthropic => "Anthropic Claude系列模型，专注于安全性和对齐性",
            ModelProvider.Google => "Google Gemini系列模型，支持长上下文和多模态",
            ModelProvider.ZhipuAI => "智谱AI GLM系列模型，支持原生和OpenAI兼容两种API模式",
            ModelProvider.Baidu => "百度文心一言系列大模型，中文理解能力强",
            ModelProvider.Alibaba => "阿里云通义千问系列模型，支持多语言对话",
            ModelProvider.Moonshot => "月之暗面Kimi模型，支持长文本处理",
            ModelProvider.Custom => "用户自定义的模型配置",
            _ => "未知模型提供商"
        };

        /// <summary>
        /// Checks whether a model provider supports a specific API mode.
        /// </summary>
        /// <param name="provider">The model provider.</param>
        /// <param name="apiMode">The API mode to test.</param>
        /// <returns><c>true</c> if the provider supports the mode; otherwise <c>false</c>.</returns>
        public static bool SupportsApiMode(this ModelProvider provider, ModelApiMode apiMode)
        {
            switch (provider)
            {
                // OpenAI-compatible-only providers.
                case ModelProvider.OpenAI:
                case ModelProvider.Moonshot:
                    return apiMode == ModelApiMode.OpenAICompatible;

                // Native-only providers.
                case ModelProvider.Anthropic:
                case ModelProvider.Google:
                case ModelProvider.Baidu:
                case ModelProvider.Alibaba:
                    return apiMode == ModelApiMode.Native;

                // ZhipuAI supports both concrete modes as well as automatic selection.
                case ModelProvider.ZhipuAI:
                    return apiMode == ModelApiMode.Native
                        || apiMode == ModelApiMode.OpenAICompatible
                        || apiMode == ModelApiMode.Auto;

                // Custom providers accept any explicit mode.
                case ModelProvider.Custom:
                    return apiMode == ModelApiMode.OpenAICompatible
                        || apiMode == ModelApiMode.Native
                        || apiMode == ModelApiMode.Custom;

                default:
                    return false;
            }
        }

        /// <summary>
        /// Gets the recommended API mode for a model provider.
        /// </summary>
        /// <param name="provider">The model provider.</param>
        /// <returns>The API mode this code base recommends; <see cref="ModelApiMode.Native"/> for unknown providers.</returns>
        public static ModelApiMode GetRecommendedApiMode(this ModelProvider provider) => provider switch
        {
            ModelProvider.OpenAI => ModelApiMode.OpenAICompatible,
            ModelProvider.Moonshot => ModelApiMode.OpenAICompatible,
            ModelProvider.Custom => ModelApiMode.OpenAICompatible,
            // Auto lets the runtime try the OpenAI-compatible mode first and fall back.
            ModelProvider.ZhipuAI => ModelApiMode.Auto,
            ModelProvider.Anthropic => ModelApiMode.Native,
            ModelProvider.Google => ModelApiMode.Native,
            ModelProvider.Baidu => ModelApiMode.Native,
            ModelProvider.Alibaba => ModelApiMode.Native,
            _ => ModelApiMode.Native
        };

        /// <summary>
        /// Gets a ZhipuAI configuration tuned for a specific model.
        /// </summary>
        /// <param name="modelName">The model name (e.g. "glm-4", "glm-4.5-airx"). Matching is
        /// an ordinal, case-sensitive substring check.</param>
        /// <returns>The ZhipuAI default config, with model-specific parameters applied when
        /// the name matches a known GLM family; the unmodified default otherwise.</returns>
        public static ModelInvocationConfig GetZhipuAIModelConfig(string modelName)
        {
            var baseConfig = ModelProvider.ZhipuAI.GetDefaultConfig();

            // Fix: the original dereferenced modelName without a guard (NRE on null).
            // A blank name yields the provider default rather than throwing.
            if (string.IsNullOrWhiteSpace(modelName))
                return baseConfig;

            // Fix: GetDefaultConfig does not populate ModelSpecificParams in this file, so
            // ensure the dictionary exists before indexing into it.
            baseConfig.ModelSpecificParams ??= new Dictionary<string, object>();

            // Check the most specific family first: "glm-4.5" also contains "glm-4".
            if (modelName.Contains("glm-4.5"))
            {
                baseConfig.ModelSpecificParams["MaxContextLength"] = 128000;
                baseConfig.ModelSpecificParams["SupportsStreaming"] = true;
                baseConfig.ModelSpecificParams["SupportsFunctionCalling"] = true;
                // GLM-4.5 is recommended to run through the OpenAI-compatible mode.
                baseConfig.PreferredMode = ModelApiMode.OpenAICompatible;
            }
            else if (modelName.Contains("glm-4"))
            {
                baseConfig.ModelSpecificParams["MaxContextLength"] = 128000;
                baseConfig.ModelSpecificParams["SupportsStreaming"] = true;
                baseConfig.ModelSpecificParams["SupportsFunctionCalling"] = true;
            }
            else if (modelName.Contains("glm-3"))
            {
                // GLM-3 has a smaller context window and no function calling.
                baseConfig.ModelSpecificParams["MaxContextLength"] = 32000;
                baseConfig.ModelSpecificParams["SupportsStreaming"] = true;
                baseConfig.ModelSpecificParams["SupportsFunctionCalling"] = false;
            }

            return baseConfig;
        }

        /// <summary>
        /// Creates the recommended configuration for ZhipuAI GLM-4.5 models.
        /// </summary>
        /// <param name="apiKey">The ZhipuAI API key; also embedded as a Bearer token header.</param>
        /// <param name="apiMode">The API mode (optional; defaults to <see cref="ModelApiMode.Auto"/>).</param>
        /// <returns>A fully populated <see cref="ModelInvocationConfig"/> for the GLM-4.5 family.</returns>
        /// <exception cref="ArgumentException">Thrown when <paramref name="apiKey"/> is null or whitespace.</exception>
        public static ModelInvocationConfig CreateGLM45Config(string apiKey, ModelApiMode apiMode = ModelApiMode.Auto)
        {
            // Fix: a null/blank key previously produced a broken "Bearer " header that
            // would only fail later at request time. Fail fast instead.
            if (string.IsNullOrWhiteSpace(apiKey))
                throw new ArgumentException("API key must not be null or whitespace.", nameof(apiKey));

            return new ModelInvocationConfig
            {
                ApiMode = apiMode,
                // Native and OpenAI-compatible traffic share the same BigModel endpoint.
                NativeApiEndpoint = "https://open.bigmodel.cn/api/paas/v4/chat/completions",
                OpenAICompatibleEndpoint = "https://open.bigmodel.cn/api/paas/v4/chat/completions",
                EnableFallback = true,
                FallbackTimeoutMs = 10000,
                MaxRetries = 3,
                PreferredMode = ModelApiMode.OpenAICompatible,
                Capabilities = ModelProviderCapabilities.Streaming |
                              ModelProviderCapabilities.FunctionCalling |
                              ModelProviderCapabilities.JsonMode |
                              ModelProviderCapabilities.SystemPrompt |
                              ModelProviderCapabilities.ToolCalling,
                SupportedModels = new List<string> { "glm-4.5-airx", "glm-4.5", "glm-4.5-air" },
                ModelSpecificParams = new Dictionary<string, object>
                {
                    { "ApiKey", apiKey },
                    { "Provider", "ZhipuAI" },
                    { "MaxContextLength", 128000 },
                    { "SupportsStreaming", true },
                    { "SupportsFunctionCalling", true },
                    { "SupportsVision", false }
                },
                CustomHeaders = new Dictionary<string, string>
                {
                    { "Authorization", $"Bearer {apiKey}" }
                }
            };
        }

        /// <summary>
        /// Gets the catalog of known model names for every provider.
        /// </summary>
        /// <returns>A new dictionary mapping each provider to its supported model names.</returns>
        /// <remarks>
        /// NOTE(review): this catalog is maintained separately from the per-provider
        /// SupportedModels in <see cref="GetDefaultConfig"/> and can drift — keep both in sync.
        /// </remarks>
        public static Dictionary<ModelProvider, List<string>> GetAllSupportedModels()
        {
            return new Dictionary<ModelProvider, List<string>>
            {
                [ModelProvider.OpenAI] = new List<string>
                {
                    "gpt-4", "gpt-4-turbo", "gpt-4o", "gpt-3.5-turbo", "gpt-3.5-turbo-16k"
                },
                [ModelProvider.Anthropic] = new List<string>
                {
                    "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307"
                },
                [ModelProvider.Google] = new List<string>
                {
                    "gemini-1.5-pro-latest", "gemini-1.5-flash-latest", "gemini-1.0-pro"
                },
                [ModelProvider.ZhipuAI] = new List<string>
                {
                    // Fix: "glm-4.5-air" was missing even though CreateGLM45Config lists it.
                    "glm-4", "glm-4-air", "glm-4-airx", "glm-4-flash", "glm-3-turbo",
                    "glm-4.5-airx", "glm-4.5", "glm-4.5-air"
                },
                [ModelProvider.Baidu] = new List<string>
                {
                    "ernie-4.0", "ernie-3.5", "ernie-speed", "ernie-tiny"
                },
                [ModelProvider.Alibaba] = new List<string>
                {
                    "qwen-turbo", "qwen-plus", "qwen-max", "qwen-max-longcontext"
                },
                [ModelProvider.Moonshot] = new List<string>
                {
                    "moonshot-v1-8k", "moonshot-v1-32k", "moonshot-v1-128k"
                },
                // Custom providers have no predefined catalog; an explicit empty entry keeps
                // IsValidModel's result (false) unchanged while making the mapping total.
                [ModelProvider.Custom] = new List<string>()
            };
        }

        /// <summary>
        /// Checks whether a model name is a known model for the given provider.
        /// </summary>
        /// <param name="provider">The model provider.</param>
        /// <param name="modelName">The model name to validate (ordinal, case-sensitive match).</param>
        /// <returns><c>true</c> if the provider lists the model; otherwise <c>false</c>.</returns>
        public static bool IsValidModel(this ModelProvider provider, string modelName)
        {
            // A blank name can never be valid; bail out before building the catalog.
            if (string.IsNullOrWhiteSpace(modelName))
            {
                return false;
            }

            return GetAllSupportedModels().TryGetValue(provider, out var knownModels)
                && knownModels.Contains(modelName);
        }
    }
}