# FastAPI service exposing a locally stored ChatGLM3-6B chat model over HTTP.
import uvicorn
from fastapi import FastAPI,Body
from fastapi.responses import JSONResponse
from typing import Dict
app = FastAPI()
from modelscope import AutoTokenizer, AutoModelForCausalLM, snapshot_download
model_dir = "E:/models/ZhipuAI/chatglm3-6b"           # local filesystem path holding the ChatGLM3 weights
tokenizer = AutoTokenizer.from_pretrained(model_dir, trust_remote_code=True)
# 4-bit quantization then move to GPU — shrinks the 6B model to fit limited VRAM.
# NOTE(review): .quantize(4) relies on the model's custom remote code + CUDA kernels — confirm on target machine.
model = AutoModelForCausalLM.from_pretrained(model_dir, trust_remote_code=True).quantize(4).cuda()  #.quantize(4).cuda()
model=model.eval()  # inference mode (disables dropout etc.)
history =[]
# Smoke-test inference. NOTE(review): this runs a full model.chat() at import
# time, so merely importing this module performs GPU work and prints output;
# consider moving it under an `if __name__ == "__main__":` guard.
response,history = model.chat(tokenizer,'晚上睡不着如何办？',history=history)
print(response)
print(history)

@app.post("/chat")
def f1(data: Dict):
    """Chat endpoint for the ChatGLM3 model.

    Expects a JSON body ``{"query": <user message>, "history": <prior turns>}``.
    ``history`` may be omitted, ``null`` or ``""`` — all are treated as an empty
    conversation. Returns ``{"response": <model reply>, "history": <updated
    history>}`` so the client can echo ``history`` back on the next request.
    """
    query = data.get("query")
    if not query:
        # Missing/empty query is a client error; previously data["query"]
        # raised KeyError and surfaced as an HTTP 500.
        return JSONResponse(content={"error": "missing 'query'"}, status_code=400)

    # Normalize history: a missing key, None, or "" all mean "no prior turns".
    # (The original only special-cased "" and crashed on a missing key.)
    history = data.get("history") or []

    response, history = model.chat(tokenizer, query, history=history, top_p=0.95, temperature=0.95)
    return JSONResponse(content={"response": response, "history": history})

if __name__ == "__main__":
    # Start the HTTP server only when executed as a script (not on import).
    # This guard was commented out, leaving the module with no way to launch
    # the API and the `uvicorn` import unused.
    uvicorn.run(app, host='127.0.0.1', port=7866)
