from typing import Optional, List


from langchain_core.callbacks import BaseCallbackHandler
from langchain_core.language_models import BaseChatModel
from langchain_openai import ChatOpenAI
from pydantic import Field, BaseModel


class BaseService(BaseModel):
    """Base service holding credentials for an OpenAI-compatible API.

    Attributes:
        api_base: Base URL of the OpenAI-compatible endpoint.
        api_key: API key used to authenticate requests.
    """
    api_base: str
    api_key: str

    def get_llm(self,
                # BUG FIX: the original default was `Field(default=..., alias="model")`.
                # `pydantic.Field` is only meaningful inside a model class body; as a
                # function default it meant callers omitting `model_name` passed a
                # FieldInfo object to ChatOpenAI instead of the model name string.
                model_name: str = "gpt-3.5-turbo-16k",
                temperature: float = 0.0,
                stream: bool = False,
                callback_manager: Optional[BaseCallbackHandler] = None,
                max_tokens: Optional[int] = None,
                verbose: bool = False,
                request_timeout: int = 10
                ) -> BaseChatModel:
        """Build a ``ChatOpenAI`` client configured with this service's credentials.

        Args:
            model_name: Name of the chat model to use.
            temperature: Sampling temperature (0.0 = deterministic).
            stream: Whether to enable token streaming.
            callback_manager: Optional callback handler forwarded to the client.
            max_tokens: Optional cap on generated tokens; ``None`` lets the
                backend use its default.
            verbose: Whether the client logs verbosely.
            request_timeout: Per-request timeout in seconds.

        Returns:
            A configured ``ChatOpenAI`` instance (response caching disabled,
            up to 3 retries per request).
        """
        return ChatOpenAI(
            openai_api_base=self.api_base,
            openai_api_key=self.api_key,
            model_name=model_name,
            temperature=temperature,
            streaming=stream,
            callback_manager=callback_manager,
            max_tokens=max_tokens,
            max_retries=3,
            # Request timeout, in seconds.
            request_timeout=request_timeout,
            verbose=verbose,
            cache=False
        )
