import os

from fastapi import FastAPI
from langchain.chains.summarize.map_reduce_prompt import prompt_template
from langchain_core.messages import SystemMessage, HumanMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate, ChatPromptTemplate
from langchain_openai import ChatOpenAI
from langserve import add_routes, RemoteRunnable
from langchain_community.chat_models.tongyi import ChatTongyi
from langchain_core.runnables.history import RunnableWithMessageHistory

# langsmith 监控

# os.environ['LANGCHAIN_TRACING_V2'] = "true"
# os.environ['LANGCHAIN_API_KEY'] = '1123'

# 调用大模型

# model = ChatOpenAI(model='gpt-4-turbo')
from sqlalchemy import true

# Make sure a DashScope API key is available before building any model.
# FIX: the original unconditionally overwrote DASHSCOPE_API_KEY with a
# redacted placeholder ("sk-******"), which clobbered any real key supplied
# by the environment and made the emptiness check below unreachable.
# `setdefault` keeps an externally provided key and only falls back to the
# placeholder when nothing is configured.
os.environ.setdefault("DASHSCOPE_API_KEY", "sk-******")
api_key = os.getenv("DASHSCOPE_API_KEY")
if not api_key:
    raise ValueError("DASHSCOPE_API_KEY environment variable not set!")

# Primary chat model: Qwen-Plus reached through DashScope's OpenAI-compatible
# endpoint; low temperature keeps translations close to deterministic.
_dashscope_base_url = "https://dashscope.aliyuncs.com/compatible-mode/v1"
model = ChatOpenAI(
    model='qwen-plus',
    temperature=0.1,
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    base_url=_dashscope_base_url,
)

# Seed the chat history: a system instruction telling the model to act as a
# translator into English, followed by one user turn.
# (System message: "you are a translation expert; translate all user input
# into English". User message: "hello, where are you going".)
msg = [
    SystemMessage(content='你是一个翻译专家，能将用户所有输入都翻译成英语'),
    HumanMessage(content='你好，请问你去哪里')
]
# Single round trip: send the whole message list and print the reply text.
result = model.invoke(msg)
print(result.content)

# Multi-turn conversation: keep appending each user turn and each model reply
# to the running message list so the model always sees the full history.
# FIX: the original `while true:` looped on SQLAlchemy's `true()` SQL
# construct (stray import above) rather than Python's builtin True — it only
# worked because that object happens to be truthy.
while True:
    question = input("请输入内容：")
    if question == "exit":
        break
    msg.append(HumanMessage(content=question))
    result = model.invoke(msg)
    print(result.content)
    # Append the AI reply as well so context accumulates across turns.
    msg.append(result)

# Second model instance, reserved for the "keep only the last few messages"
# multi-turn demo (trimmed-history variant).
# FIX: dropped the invalid `placeholder="多轮对话"` keyword — ChatOpenAI has
# no such field, and current langchain_openai/pydantic versions reject
# unknown constructor arguments instead of silently ignoring them.
model1 = ChatOpenAI(
    model='qwen-plus',
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    temperature=0.1,
)

# #
# # #简单解析响应数据
# # parser =StrOutputParser()
# #
# # return_str=parser.invoke(result)
# #
# # print(return_str)
#
# # 得到链,直接使用chain
# # chain = model | parser
# # print(chain.invoke(msg))
# parser = StrOutputParser()
# prompt_template = ChatPromptTemplate.from_messages([
#     ('system', '请将下面的内容翻译成{language}'),
#     ('user', "{text}")
# ])
#
# chain = prompt_template | model | parser
#
# print(chain.invoke({'language': 'English', 'text': '不能去打球了'}))
#
# # 把我们的程序部署成服务，创建fastapi的应用
# app = FastAPI(title='my langchain service', version='1.0', description='use langchain fanyi')
#
# add_routes(
#     app,
#     chain,
#     path="/chainDemo",
# )
#
# if __name__ == '__main__':
#     import uvicorn
#
#     uvicorn.run(app, host="localhost", port=8000)
#
# # 通过访问http://127.0.0.1:8000/chainDemo/  POST
# # 参数：language：
# #     text
# # python脚本调用测试接口调用
# # if __name__ == '__main__':
# #     client = RemoteRunnable('http://localhost:8000/chainDemo')
# #     client.invoke({'language':'English','text':'nihao!'})
