# services/llm_service.py
from langchain_openai import ChatOpenAI
import threading
import os
# Route all OpenAI-compatible clients in this process to SiliconFlow's endpoint.
# NOTE(review): SiliconFlow's OpenAI-compatible API is usually served under
# https://api.siliconflow.cn/v1 — confirm requests succeed without the "/v1" suffix.
os.environ["OPENAI_API_BASE"] = "https://api.siliconflow.cn"
class DeepSeekFactory:
    """Process-wide, thread-safe cache of per-user ``ChatOpenAI`` clients.

    Clients are built lazily on first request and reused on subsequent
    calls, so each user gets exactly one client instance per API key.
    """

    # user_id -> (api_key, ChatOpenAI instance). The key is stored so a
    # changed API key invalidates the cached client instead of silently
    # reusing one authenticated with stale credentials.
    _llms: dict = {}
    _lock = threading.Lock()

    @classmethod
    def get_llm(cls, user_id: int, api_key: str):
        """Return the cached LLM client for ``user_id``, creating it if needed.

        Bug fix vs. the previous version: ``api_key`` is now part of the
        cache validity check — if the caller supplies a different key for a
        cached user (e.g. after key rotation), a fresh client is built
        rather than returning the one created with the old key.
        """
        # A single lock acquisition keeps check-and-create atomic; client
        # construction is cheap enough that a lock-free fast path is not worth
        # the double-checked-locking complexity.
        with cls._lock:
            cached = cls._llms.get(user_id)
            if cached is None or cached[0] != api_key:
                cls._llms[user_id] = (
                    api_key,
                    ChatOpenAI(
                        model="Qwen/Qwen2.5-7B-Instruct",
                        temperature=0.7,
                        api_key=api_key,
                    ),
                )
            return cls._llms[user_id][1]
# Query the chat history of users "yuan" and "yuan3"