import os
from dotenv import load_dotenv
from openai import OpenAI
from langchain.chat_models import init_chat_model
from langchain_community.chat_models.tongyi import ChatTongyi
from langchain_ollama import ChatOllama

# Verify a DeepSeek API key using the plain `openai` package — the most basic
# usage, entirely independent of LangChain (DeepSeek exposes an OpenAI-compatible API).
def test_deepseek_apikey_by_openai(api_key):
    """Send one minimal chat completion to DeepSeek to confirm the key works.

    Args:
        api_key: DeepSeek API key string.
    """
    openai_client = OpenAI(api_key=api_key, base_url="https://api.deepseek.com")
    response = openai_client.chat.completions.create(
        model="deepseek-chat",
        messages=[
            # Fix: the system message must precede the user message; the original
            # order (user first) can cause the model to ignore the instruction.
            {
                "role": "system",
                "content": "你是一个乐于助人的AI助手",
            },
            {
                "role": "user",
                "content": "请用中文介绍一下你自己",
            },
        ])
    print(f"!!! OpenAI DeepSeek response: {response}, content:{response.choices[0].message.content}")

# Simple chat with the DeepSeek model through LangChain's unified chat-model API.
# Requires the provider package first: pip3 install langchain-deepseek
def test_langchain_chat_model_with_deepseek():
    """Ask deepseek-chat to introduce itself and print the full reply."""
    # For OpenAI's gpt-4o-mini this would instead be
    # init_chat_model("gpt-4o-mini", model_provider="openai"),
    # after pip3 install langchain-openai.
    chat_model = init_chat_model(model="deepseek-chat", model_provider="deepseek")
    prompt = "请用中文介绍一下你自己"
    reply = chat_model.invoke(prompt)
    print(f"!!! Langchain chat model deepseek response: {reply}, content:{reply.content}")


# Verify an Alibaba Cloud Bailian (DashScope) API key using the plain `openai`
# package — basic usage via DashScope's OpenAI-compatible endpoint, no LangChain.
def test_bailiaan_apikey_by_openai(api_key):
    """Send one minimal chat completion to Qwen to confirm the key works.

    Args:
        api_key: DashScope API key string.
    """
    openai_client = OpenAI(
        api_key=api_key,
        base_url="https://dashscope.aliyuncs.com/compatible-mode/v1")
    response = openai_client.chat.completions.create(
        # Model list: https://help.aliyun.com/zh/model-studio/getting-started/models
        model="qwen-plus",
        messages=[
            # Fix: system message must precede the user message; the original
            # order (user first) can cause the model to ignore the instruction.
            {
                "role": "system",
                "content": "你是一个乐于助人的AI助手",
            },
            {
                "role": "user",
                "content": "请用中文介绍一下你自己",
            },
        ])

    print(f"!!! OpenAI Qwen3 response: {response}, content:{response.choices[0].message.content}")

# Simple chat with the Qwen model via LangChain's community-provided Tongyi
# integration. Install support first: pip3 install dashscope langchain-community
def test_langchain_chat_model_with_qwen3():
    """Ask the Tongyi (Qwen) chat model to introduce itself and print the reply."""
    tongyi_model = ChatTongyi()
    prompt = "你好，请你介绍一下你自己。"
    reply = tongyi_model.invoke(prompt)
    print(f"!!! Langchain chat model qwen3 response: {reply}, content:{reply.content}")


# Chat with a locally hosted open-source model via Ollama through LangChain.
# Install support first: pip3 install langchain-ollama
# Ollama can serve many open-source LLMs on the local machine.
def test_langchain_chat_model_with_ollama():
    """Ask a locally served deepseek-r1 model to introduce itself and print the reply."""
    local_model = ChatOllama(model="deepseek-r1", base_url="http://localhost:11434")
    prompt = "你好，请你介绍一下你自己。"
    reply = local_model.invoke(prompt)
    print(f"!!! Langchain chat model ollama response: {reply}, content:{reply.content}")



if __name__ == '__main__':
    # Load .env, overriding any pre-existing environment variables.
    load_dotenv(override=True)
    DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
    DASHSCOPE_API_KEY = os.getenv("DASHSCOPE_API_KEY")

    def _mask(secret):
        """Show only a short prefix of a secret — enough to confirm it loaded."""
        return f"{secret[:4]}****" if secret else "<missing>"

    # Security fix: never print raw API keys — they leak into terminal
    # scrollback and log files. Print a masked form instead.
    print(f"DEEPSEEK_API_KEY: {_mask(DEEPSEEK_API_KEY)}")
    print(f"DASHSCOPE_API_KEY: {_mask(DASHSCOPE_API_KEY)}")
    test_deepseek_apikey_by_openai(api_key=DEEPSEEK_API_KEY)
    test_bailiaan_apikey_by_openai(api_key=DASHSCOPE_API_KEY)
    # The LangChain helpers need no explicit key argument — they read
    # DEEPSEEK_API_KEY / DASHSCOPE_API_KEY from the environment.
    test_langchain_chat_model_with_deepseek()
    test_langchain_chat_model_with_qwen3()
    test_langchain_chat_model_with_ollama()

