from app.config import (
    DEEPINFRA_API_KEY,
    DEEPINFRA_BASE_URL,
    DEEPSEEK_API_KEY,
    DEEPSEEK_BASE_URL,
    VOLCES_API_KEY,
    VOLCES_BASE_URL,
)
from llm_generation.models.anthropic_official import AnthropicOfficial
from llm_generation.models.base import BaseModel
from llm_generation.models.deepseek import DeepSeek
from llm_generation.models.gemini import Gemini
from llm_generation.models.open_ai import OpenAI
from llm_generation.models.perplexity import Perplexity

# Static registry of available model instances, keyed by model name.
# Built from a (name, class, provider kwargs) spec table so the dict key
# and the model_name argument passed to the wrapper can never drift apart.

# Provider endpoint/credential kwargs, grouped once per backend.
_DEEPINFRA_KWARGS = {"base_url": DEEPINFRA_BASE_URL, "api_key": DEEPINFRA_API_KEY}
_DEEPSEEK_KWARGS = {"base_url": DEEPSEEK_BASE_URL, "api_key": DEEPSEEK_API_KEY}
_VOLCES_KWARGS = {"base_url": VOLCES_BASE_URL, "api_key": VOLCES_API_KEY}

# (model name, wrapper class, provider-specific keyword arguments).
# Order matters: it fixes the registry's key insertion order.
_MODEL_SPECS = [
    ("gpt-4o", OpenAI, {}),
    ("gpt-4o-mini", OpenAI, {}),
    ("deepseek-ai/DeepSeek-V3", DeepSeek, _DEEPINFRA_KWARGS),
    ("deepseek-ai/DeepSeek-R1", DeepSeek, _DEEPINFRA_KWARGS),
    ("deepseek-reasoner", DeepSeek, _DEEPSEEK_KWARGS),
    ("deepseek-chat", DeepSeek, _DEEPSEEK_KWARGS),
    ("ft:gpt-4o-2024-08-06:auditgpt:aixbt-tone-test-2:Ae1oBOo3", OpenAI, {}),
    ("o3-mini", OpenAI, {}),
    ("sonar-reasoning-pro", Perplexity, {}),
    ("sonar", Perplexity, {}),
    ("deepseek-r1-250120", DeepSeek, _VOLCES_KWARGS),
    ("claude-3-7-sonnet-latest", AnthropicOfficial, {}),
    ("gemini-2.5-pro-exp-03-25", Gemini, {}),
    ("gemini-2.5-pro-preview-03-25", Gemini, {}),
    ("o4-mini", OpenAI, {}),
    ("o3", OpenAI, {}),
]

_MODEL_DICT = {
    name: factory(model_name=name, **extra) for name, factory, extra in _MODEL_SPECS
}


def get_model(model_name: str) -> BaseModel:
    """Look up the shared model instance registered under ``model_name``.

    Args:
        model_name: Key into the module-level model registry.

    Returns:
        The registered :class:`BaseModel` instance for that name.

    Raises:
        ValueError: If no model is registered under ``model_name``.
    """
    if model_name not in _MODEL_DICT:
        raise ValueError(f"Model {model_name} not found")

    # NOTE: the registry holds shared, module-level instances. If a model
    # class ever keeps per-request state, return a copy (or re-instantiate)
    # here instead of handing out the shared object.
    return _MODEL_DICT[model_name]
