from flask import Flask, Response, stream_with_context
from flask import request
from flask_common import create_app
import time

from transformers import AutoTokenizer, AutoModel
import pynvml
# Initialise NVML once at import time so per-chunk GPU memory usage can
# be reported in the SSE stream.
pynvml.nvmlInit()
# NOTE(review): this handle targets GPU index 0, but the model below is
# loaded on "cuda:2" — the memory figure streamed to clients may describe
# a different device than the one running the model; confirm the index.
handle = pynvml.nvmlDeviceGetHandleByIndex(0)# 0 here is the GPU id

# Alternate local checkpoint paths, kept for reference:
#MODEL_PATH = "/mnt/sda/lzx/projects/industry_llm/langchain-chatchat/xinference/models--THUDM--chatglm3-6b"
#TOKENIZER_PATH = "/mnt/sda/lzx/projects/industry_llm/langchain-chatchat/xinference/models--THUDM--chatglm3-6b"

# Filesystem paths of the ChatGLM3-6B checkpoint and its tokenizer.
MODEL_PATH = "/root/data/light/models--THUDM--chatglm3-6b"
TOKENIZER_PATH = "/root/data/light/models--THUDM--chatglm3-6b"

# trust_remote_code is required because ChatGLM3 ships custom modelling
# code inside the checkpoint; the model is placed on GPU 2 and switched
# to eval mode at import time.
tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_PATH, trust_remote_code=True)
model = AutoModel.from_pretrained(MODEL_PATH, trust_remote_code=True, device_map="cuda:2").eval()


app = Flask(__name__)
# NOTE(review): helper from flask_common — presumably registers shared
# config/middleware on the app; verify against that module.
create_app(app)

def generate(query):
    """Stream a ChatGLM3 answer for *query* as Server-Sent Events.

    Each chunk is emitted as one SSE frame of the form
    ``data:<text-delta>&$&<tokens-per-second>&$&<gpu-mem-used-MiB>\n\n``
    (fields joined by the literal ``&$&`` separator the client parses),
    followed by a terminal ``data:[DONE]\n\n`` frame.

    :param query: user prompt string (fresh conversation; no prior history).
    :yields: str SSE frames as described above.
    """
    past_key_values, history = None, []
    start_time = time.time()
    current_length = 0  # chars of `response` already sent to the client
    total_tokens = 0    # tokens emitted so far, for the throughput figure
    for response, history, past_key_values in model.stream_chat(tokenizer, query, history=history, top_p=1,
                                                                temperature=0.01,
                                                                past_key_values=past_key_values,
                                                                return_past_key_values=True):
        delta = response[current_length:]
        # Count this chunk's tokens *before* computing the rate: the
        # original updated the counter after yielding, so the reported
        # throughput lagged one chunk behind and the first frame was
        # always 0.
        total_tokens += len(tokenizer(delta, return_tensors='pt')['input_ids'][0])
        elapsed_time = time.time() - start_time
        # Guard against ZeroDivisionError when the first chunk arrives
        # within the clock's resolution.
        tokens_per_second = total_tokens / elapsed_time if elapsed_time > 0 else 0.0
        meminfo = pynvml.nvmlDeviceGetMemoryInfo(handle)
        yield "data:" + str(delta) + "&$&" + str(tokens_per_second) + "&$&" + str(meminfo.used / 1024 / 1024) + "\n\n"
        current_length = len(response)
    yield "data:[DONE]\n\n"

@app.route("/generate", methods=["GET"])
def stream_generate():
    """SSE endpoint: stream the model's answer to the ``query`` GET parameter."""
    user_query = request.args.get('query')
    event_stream = stream_with_context(generate(user_query))
    return Response(event_stream, mimetype="text/event-stream")


@app.route("/clear", methods=["POST"])
def clear_history():
    """Acknowledge a history-reset request.

    Conversation state (``history``/``past_key_values``) lives only as
    locals inside ``generate`` and is rebuilt on every request, so there
    is no server-side state to clear here.  The original body assigned
    throwaway locals (``past_key_values, history = None, []``) — dead
    code removed; the endpoint is kept as a client-compatible no-op.
    """
    return "success"

if __name__=="__main__":
    # Serve on all interfaces, port 5004; debug disabled for production-style use.
    app.run(debug=False, host="0.0.0.0", port=5004)