import os

from dotenv import find_dotenv, load_dotenv
from langchain_openai import ChatOpenAI

# Load environment variables (e.g. MOONSHOT_*) from the nearest .env file
# before LLMFactory.MODEL_CONFIG reads them at class-definition time.
_ = load_dotenv(find_dotenv())


class LLMFactory:
    MODEL_CONFIG = {
        "moonshot": {
            "model": "kimi-k2-turbo-preview",
            "base_url": os.getenv("MOONSHOT_BASE_URL"),
            "api_key": os.getenv("MOONSHOT_API_KEY"),
        }
    }

    @classmethod
    def get_llm(cls, model_provider: str = None, temperature: float = None):
        if not model_provider:
            model_provider = "moonshot"

        if not temperature:
            temperature = 0.2

        if not (c := cls.MODEL_CONFIG.get(model_provider)):
            raise ValueError(f"Invalid model provider: {model_provider}")

        return ChatOpenAI(
            model=c["model"],
            temperature=temperature,
            base_url=c["base_url"],
            api_key=c["api_key"],
        )


if __name__ == "__main__":
    llm = LLMFactory.get_llm()
    print(llm.invoke("你好,你是谁呀").content)
