from zhipuai import ZhipuAI
import uvicorn
from fastapi import FastAPI, Request
from typing import List, Dict
from fastapi.staticfiles import StaticFiles
from fastapi.middleware.cors import CORSMiddleware

# ZhipuAI API client.
# NOTE(review): SECURITY — the API key is hard-coded in source. Move it to an
# environment variable / secret store and rotate this key, since it is now
# exposed wherever this file is shared.
client = ZhipuAI(api_key="35730a173247ce94aeabb5197cd4a9f7.Lm7yEg01Zbyb3dcZ")
# System prompt (in Chinese): the assistant is "Xiao Pa", self-developed by
# Beijing University of Civil Engineering and Architecture, running offline
# inference on Ascend hardware; it acts as a psychologist's assistant that
# asks about the user's current state and follows up on the answers.
systemPrompt = "你是由北京建筑大学自研，基于昇腾设备进行离线推理的机器人小帕，是心理医生的好助手，当与用户进行交流时你会询问用户目前状态并根据回答进行追问。"
# FastAPI application instance.
app = FastAPI()

# NOTE(review): per the CORS spec, a wildcard origin ('*') cannot be combined
# with allow_credentials=True — browsers reject credentialed wildcard
# responses. List explicit origins if credentials are actually needed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=['*'],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

def messageBuild(chatHistory, msg):
    """Assemble the chat-completions message list for one API call.

    Prepends the global system prompt, replays every (user, assistant)
    turn from *chatHistory*, and appends *msg* as the newest user turn.

    Args:
        chatHistory: iterable of (user_message, assistant_reply) pairs.
        msg: the new user message to send.

    Returns:
        A list of {"role": ..., "content": ...} dicts in API order.
    """
    # Flatten each history pair into its two role-tagged entries.
    history_turns = [
        {"role": role, "content": text}
        for user_text, assistant_text in chatHistory
        for role, text in (("user", user_text), ("assistant", assistant_text))
    ]
    return (
        [{"role": "system", "content": systemPrompt}]
        + history_turns
        + [{"role": "user", "content": msg}]
    )

def responseBuild(responseMsg, model="glm-4"):
    """Send one chat-completion request to the ZhipuAI API.

    Args:
        responseMsg: list of {"role", "content"} message dicts, as
            produced by messageBuild.
        model: model identifier to query. Defaults to "glm-4" so existing
            callers keep their previous behavior.

    Returns:
        The raw SDK response object; the reply text lives at
        response.choices[0].message.content.
    """
    # Synchronous (blocking) network call to the ZhipuAI service.
    return client.chat.completions.create(
        model=model,
        messages=responseMsg,
    )


messages = []
# fastapipost请求方法
@app.post("/post")
async def postllm(msg: Request):
    postMsg = await msg.json()
    print(postMsg)

    userMsg = postMsg.get('content', '')
    a = messageBuild(messages, userMsg)
    responseMsg = responseBuild(a)
    messages.extend([(userMsg, responseMsg.choices[0].message.content)])
    return {"response": responseMsg.choices[0].message.content, "chathistory": messages}


# Manual trial run (interactive console test, kept commented out for reference)
# messages = []
# while True:
#     In = input("msg:")
#     if In == 'exit':
#         break
#     a = messageBuild(messages, In)
#     res = responseBuild(a)
#     messages.extend([(In, res.choices[0].message.content)])
#     print(res.choices[0].message.content)
#     print(messages)
#

if __name__ == '__main__':
    # Start the dev server. 'llm:app' is the import string "app object in
    # module llm" — reload=True requires this string form rather than the
    # app object itself. NOTE(review): this assumes the file is saved as
    # llm.py; confirm the module name matches.
    uvicorn.run('llm:app', host="127.0.0.1", port=8001, reload=True)
