import os

from dotenv import load_dotenv

from domain.model.base import ChatModelManager
from domain.model.platform.deepseek import DeepseekChatModel, deepseek_chat_model
from domain.model.platform.google import GoogleChatModel, google_chat_model
from domain.model.platform.ollama import OllamaChatModel, ollama_chat_model
from domain.model.platform.zhipu import ZhipuChatModel, zhipu_chat_model
from infrastructure.utils import env

# Load environment variables from a local .env file so the API keys read
# below (via env.get_env_by_key) are available at import time.
load_dotenv()

# Global registry of chat models, keyed by platform name; the rest of the
# application resolves models through this single manager instance.
CHAT_MODEL_MANAGER = ChatModelManager()

# Register the chat models served by a local Ollama instance.
# NOTE(review): the endpoint was previously hard-coded; it is now
# overridable via OLLAMA_BASE_URL (falling back to the same default),
# matching how the API-key-based platforms read their config from env.
CHAT_MODEL_MANAGER.register_platform_model(
    OllamaChatModel.PLATFORM_NAME,
    ollama_chat_model.init_model(
        'qwen2.5:1.5b',
        'deepseek-r1:1.5b',
        'deepseek-coder:1.3b',
        'llama2:latest',
        'gemma3:1b',
        'llama3.1:latest',
        'llama3.2:latest',
        'glm4:9b',
        'qwen2.5-coder:3b',
        base_url=os.getenv('OLLAMA_BASE_URL', 'http://127.0.0.1:11434'),
        temperature=0,
        max_token=8192,
    )
)
# Register the Zhipu (GLM) platform models; requires ZHIPUAI_API_KEY in env.
_zhipu_models = zhipu_chat_model.init_model(
    'glm-4',
    'glm-4-plus',
    'glm-4-long',
    'glm-4-flash-250414',
    api_key=env.get_env_by_key('ZHIPUAI_API_KEY'),
    temperature=0,
    max_token=8192,
)
CHAT_MODEL_MANAGER.register_platform_model(ZhipuChatModel.PLATFORM_NAME, _zhipu_models)

# Register DeepSeek's hosted chat models; requires DEEPSEEK_API_KEY in env.
_deepseek_models = deepseek_chat_model.init_model(
    'deepseek-chat',
    'deepseek-reasoner',
    api_key=env.get_env_by_key('DEEPSEEK_API_KEY'),
    temperature=0,
    max_token=8192,
)
CHAT_MODEL_MANAGER.register_platform_model(
    DeepseekChatModel.PLATFORM_NAME,
    _deepseek_models,
)

# Register Google's Gemini models; requires GOOGLE_API_KEY in env.
_google_platform = GoogleChatModel.PLATFORM_NAME
_google_models = google_chat_model.init_model(
    'gemini-2.5-pro-preview-05-06',
    'gemini-2.5-flash-preview-05-20',
    api_key=env.get_env_by_key('GOOGLE_API_KEY'),
    temperature=0,
    max_token=8192,
)
CHAT_MODEL_MANAGER.register_platform_model(_google_platform, _google_models)
