"""Agent 基类."""

from __future__ import annotations

import logging
from dataclasses import dataclass
from typing import Optional

from langchain_core.language_models import BaseLanguageModel
from langchain_openai import ChatOpenAI

from config.settings import get_settings


logger = logging.getLogger(__name__)


@dataclass
class AgentConfig:
    """Per-agent LLM configuration overrides.

    Any field left at its default falls back to the application-level
    settings when the agent builds its model client.
    """

    # Model name; None means "use settings.default_model".
    model: Optional[str] = None
    # Sampling temperature; 0.0 keeps outputs deterministic by default.
    temperature: float = 0.0
    # Completion token cap; None defers to the provider's default limit.
    max_tokens: Optional[int] = None

class BaseETFAnalysisAgent:
    """Base class for all analysis agents.

    Holds the shared LLM handle plus per-agent configuration; subclasses
    issue model calls through the read-only ``llm`` property.
    """

    def __init__(self, llm: Optional[BaseLanguageModel] = None, *, config: Optional[AgentConfig] = None) -> None:
        """Initialize the agent.

        Args:
            llm: A pre-built language model to reuse. When omitted, one is
                constructed from ``config`` and the application settings.
            config: Per-agent overrides (model name, temperature,
                max_tokens). Defaults to a fresh ``AgentConfig``.
        """
        self._settings = get_settings()
        self._config = config or AgentConfig()
        self._llm = llm or self._build_llm()

    def _build_llm(self) -> BaseLanguageModel:
        """Construct a ``ChatOpenAI`` client from settings and config.

        Falls back to ``settings.default_model`` when the config does not
        pin a model. A missing API key only triggers a warning (it does not
        raise), so agents can still be constructed offline.
        """
        model_name = self._config.model or self._settings.default_model
        if not self._settings.openai_api_key:
            logger.warning("未设置 OpenAI API Key，LangChain agent 将无法调用远程模型")
        init_kwargs: dict[str, object] = {
            "model": model_name,
            "temperature": self._config.temperature,
            # NOTE(review): `openai_api_key` is the legacy alias for the
            # current `api_key` parameter; kept as-is for compatibility with
            # the pinned langchain-openai version — confirm before renaming.
            "openai_api_key": self._settings.openai_api_key,
        }
        # Only forward max_tokens when explicitly configured — some
        # OpenAI-compatible backends reject an explicit null value, and
        # omitting it lets the client/provider default apply.
        if self._config.max_tokens is not None:
            init_kwargs["max_tokens"] = self._config.max_tokens
        if self._settings.api_base_url:
            # Route requests through a custom OpenAI-compatible endpoint.
            init_kwargs["base_url"] = self._settings.api_base_url
        return ChatOpenAI(**init_kwargs)

    @property
    def llm(self) -> BaseLanguageModel:
        """The underlying language model this agent delegates to."""
        return self._llm

