# -*- coding:utf-8 -*-

import uvicorn
from fastapi import FastAPI, Body
from fastapi.responses import JSONResponse
from typing import Dict
from modelscope import AutoTokenizer, AutoModel, snapshot_download

app = FastAPI()
# Download (or locate in the local cache) the pinned ChatGLM3-6B snapshot and
# use its directory path directly for loading tokenizer/model.
model_dir = snapshot_download("ZhipuAI/chatglm3-6b", revision = "v1.0.0")        # resolves to the local storage path of ChatGLM3
tokenizer = AutoTokenizer.from_pretrained(model_dir, trust_remote_code=True)
# Alternative (GPU) load path kept for reference: 4-bit quantized on CUDA.
# model = AutoModel.from_pretrained(model_dir, trust_remote_code=True).half().quantize(4).cuda() #.cuda()  #
# CPU inference in fp16. NOTE(review): many CPU ops lack half-precision kernels,
# so .half() on CPU may be slow or unsupported — confirm, or use .float() on CPU.
model = AutoModel.from_pretrained(model_dir, trust_remote_code=True).half().cpu().eval()

@app.post("/chat")
def f1(data: Dict):
    """Run one conversational turn of ChatGLM3.

    Expects a JSON body like ``{"query": "...", "history": [...] }``.
    ``history`` may be omitted, ``""`` or ``null`` — all are treated as an
    empty conversation.  Returns ``{"response": ..., "history": ...}`` where
    ``history`` is the updated turn list to send back on the next request.
    """
    # .get() avoids a KeyError (-> HTTP 500) when the client omits a field.
    query = data.get("query", "")
    # Clients historically send "" for "no history"; normalize any falsy
    # value (missing key, None, "", []) to an empty list for model.chat.
    history = data.get("history") or []

    # top_p / temperature follow ChatGLM3's recommended sampling defaults.
    response, history = model.chat(tokenizer, query, history=history, top_p=0.95, temperature=0.95)

    return JSONResponse(content={"response": response, "history": history})

if __name__ == "__main__":
    # Serve the app on all interfaces; reload disabled for this standalone script.
    uvicorn.run(app=app, host="0.0.0.0", port=7866, reload=False)