import os
from dotenv import load_dotenv
from langchain_community.chat_models.tongyi import ChatTongyi
from langchain_community.embeddings import DashScopeEmbeddings

# Load variables from a local .env file (if present) so the DashScope
# credentials below need not be hard-coded or exported manually.
load_dotenv()
# DashScope (Alibaba Cloud Tongyi/Bailian) API key; None if unset.
qwen_key = os.getenv("DASHSCOPE_API_KEY") 
# OpenAI-compatible DashScope endpoint used by the chat-model factory below.
qwen_base_url = "https://dashscope.aliyuncs.com/compatible-mode/v1"


class ChatModelFactory:
    """Factory for DashScope (Tongyi) chat models with shared defaults."""

    # Defaults applied to every model: deterministic sampling (temperature 0)
    # and a fixed seed for reproducible completions.
    model_params = {
        "temperature": 0,
        "seed": 42,
    }

    @classmethod
    def get_model(cls, model_name: str, **overrides):
        """Build a ``ChatTongyi`` client for *model_name*.

        Any keyword arguments override the class-level ``model_params``
        defaults for this call only (e.g. ``temperature=0.7``); existing
        callers that pass just a model name are unaffected.
        """
        # Merge without mutating the shared class-level dict.
        params = {**cls.model_params, **overrides}
        return ChatTongyi(
            model=model_name,
            model_server=qwen_base_url,
            api_key=qwen_key,
            **params,
        )

    @classmethod
    def get_default_model(cls):
        """Return the default chat model (qwen-max)."""
        return cls.get_model("qwen-max")


class EmbeddingModelFactory:
    """Factory for DashScope embedding models."""

    @classmethod
    def get_model(cls, model_name: str, use_azure: bool = False):
        """Return an embeddings client for *model_name*.

        Only ``"text-embedding-v2"`` is supported; anything else raises
        ``ValueError``. (``use_azure`` is accepted for interface
        compatibility but currently unused.)
        """
        # Guard clause: reject unknown models up front.
        if model_name != "text-embedding-v2":
            raise ValueError(f"Unsupported model name: {model_name}")
        return DashScopeEmbeddings(model=model_name, dashscope_api_key=qwen_key)

    @classmethod
    def get_default_model(cls):
        """Return the default embedding model (text-embedding-v2)."""
        return cls.get_model("text-embedding-v2")


if __name__ == "__main__":
    embeddings =  EmbeddingModelFactory.get_model("text-embedding-v2")
    query_vector = embeddings.embed_query("如何集成百炼 Embedding？")
    print(query_vector)
