import os
from langchain_openai import ChatOpenAI
from langchain_core.messages import SystemMessage, HumanMessage  # 添加缺失的导入
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from fastapi import FastAPI
from langserve import add_routes

# --- Model configuration ---
# SECURITY: an API key was hard-coded here. A key committed to source control
# is leaked and must be rotated; prefer supplying these values through the
# real environment. `setdefault` keeps the literals as development fallbacks
# while letting an externally-set environment variable take precedence
# (the original unconditional assignments silently clobbered it).
os.environ.setdefault("LLM_MODEL", "Qwen/QwQ-32B")
os.environ.setdefault("API_URL", "https://api.siliconflow.cn/v1/")
# TODO(review): remove this fallback entirely and require API_KEY to be set.
os.environ.setdefault("API_KEY", "sk-wcsjgztdbpewpycuviarliglbfrcbpnxqluwncnvplmjnezy")

# 1. Create the LLM client against the OpenAI-compatible endpoint above.
llm_obj = ChatOpenAI(
    openai_api_base=os.environ["API_URL"],
    openai_api_key=os.environ["API_KEY"],
    model_name=os.environ["LLM_MODEL"],
)

# 2. (A raw-message demo previously lived here: building a list of
#    SystemMessage/HumanMessage and calling llm_obj.invoke(msg) directly.
#    Removed as dead code; the chain below supersedes it.)

# 3. Output parser: reduces the model's chat response to its plain
#    string content, so the chain returns `str` instead of a message object.
parser = StrOutputParser()

# Define the prompt template: the system message carries the target language,
# the user message carries the text to translate.
# Bug fix: removed a stray ")" that was embedded in the system prompt string
# ("...翻译成{language})") and leaked a literal parenthesis into every request.
prompt_template = ChatPromptTemplate.from_messages([
    ("system", "请将以下内容翻译成{language}"),
    ("user", "{text}"),
])


#4.得到链
chain = prompt_template | llm_obj | parser

#5.直接使用chain来调用
# print(chain.invoke(msg))
print(chain.invoke({'language': 'English', 'text': '我想读书'}))

# --- Deploy the chain as an HTTP service ---
# Create the FastAPI application.
# Typo fix in the user-facing title: "我得" -> "我的" ("my").
app = FastAPI(
    title="我的LangChain服务",
    version="V1.0",
    description="使用LangChain翻译任何语句的服务",
)

# Mount the chain under /chainDemo; langserve generates the standard
# endpoints (/chainDemo/invoke, /chainDemo/stream, /chainDemo/batch, ...).
add_routes(
    app,
    chain,
    path="/chainDemo",
)

if __name__ == "__main__":
    # Start the development ASGI server only when executed as a script,
    # not when this module is imported by another process.
    import uvicorn

    uvicorn.run(app, host="localhost", port=8000)