#!/usr/bin/python3
# coding=utf-8
import openai

from flask import request, jsonify, Response,stream_with_context
from flask import Flask, request
import json
from args import *
from flask_cors import cross_origin
from data import get_data
from prompts import get_summary_prompt
# OpenAI-compatible client pointed at a local Xinference server.
# Xinference ignores the API key, but the openai client rejects an empty
# string, hence the placeholder value.
client = openai.Client(
    api_key="cannot be empty",
    base_url=f"http://{xinference_url}:{xinference_port}/v1"
)
# Round-robin index (1..num_of_model) choosing which model replica serves
# the next request; advanced inside event_stream().
order_of_model = 1
def chat_model(prompt, order, model_uid=model_uid, max_tokens=max_tokens, temperature=temperature):
    """Request a streaming chat completion from one model replica.

    Args:
        prompt: User message to send to the model.
        order: Replica index; the served model id is ``{model_uid}_{order}``.
        model_uid: Base model uid (default from args config).
        max_tokens: Completion token limit (default from args config).
        temperature: Sampling temperature (default from args config).

    Returns:
        The streaming response iterator of completion chunks.
    """
    user_message = {
        "content": prompt,
        "role": "user",
    }
    return client.chat.completions.create(
        model=f'{model_uid}_{order}',
        messages=[user_message],
        max_tokens=max_tokens,
        temperature=temperature,
        stream=True,
    )

# Flask application serving the SSE summary endpoint; debug disabled for
# production-style operation.
app = Flask(__name__)
app.debug = False

def wrap_data(content: str) -> dict:
    """Wrap a text chunk in the OpenAI-style streaming delta envelope.

    The frontend parses SSE frames shaped like OpenAI chat-completion
    chunks, so every piece of text goes out as
    ``{"choices": [{"delta": {"content": ...}}]}``.
    """
    return {"choices": [{"delta": {"content": content}}]}

def get_all_ids(data):
    """Extract the third field (the cluster id collection) from each row."""
    return [row[2] for row in data]

def get(dict_, k, default):
    """Like ``dict.get``, but also fall back to *default* for explicit None.

    The request JSON may carry ``null`` for optional fields, which plain
    ``dict.get`` would return as ``None``; this treats it as missing.

    Args:
        dict_: Mapping to look up.
        k: Key to fetch.
        default: Value returned when the key is absent or its value is None.

    Returns:
        The stored value, or *default* when absent/None.
    """
    # Single lookup replaces the original `in` test + subscript (EAFP-ish,
    # and avoids the double hash of the key).
    value = dict_.get(k)
    return default if value is None else value

def event_stream(req):
    """Yield SSE ``data:`` frames summarizing the hottest event clusters.

    For each of the top clusters: a heat header, the model's streamed
    summary, a line break, and a clickable "details" element carrying the
    cluster's document ids. Ends with a ``[DONE]`` sentinel frame.

    Args:
        req: Parsed JSON request body; requires ``startDate``/``endDate``,
            optionally ``sim``, ``area``, ``type``, ``clusterNum``.

    Yields:
        ``data:<json>\\n\\n`` strings in OpenAI-chunk envelope format.
    """
    count = 0
    global order_of_model
    area = req.get("area", "")
    event_type = req.get("type", "")  # renamed: don't shadow builtin `type`
    data = get_data(start_date=req['startDate'], end_date=req['endDate'],
                    sim=req.get("sim", 0.65), area=area, type=event_type)
    num_of_data = get(req, "clusterNum", max_show_data)
    max_events = min(num_of_data, len(data))
    ids_array = get_all_ids(data[:max_events])
    # Advance the round-robin replica index (range 1..num_of_model) and pin
    # this request to one replica for all its clusters.
    order_of_model = (order_of_model + 1) % num_of_model + 1
    temp_order_of_model = order_of_model
    while count < max_events:
        # Heat header for this cluster (data[count][1] is the heat score).
        count_content = json.dumps(wrap_data(f"<br/>【热度：{data[count][1]}】"), ensure_ascii=False)
        yield f'data:{count_content}\n\n'
        prompt = get_summary_prompt(data[count][0])
        for r in chat_model(f"{prompt}", temp_order_of_model):
            content = r.choices[0].delta.content
            # Fix: the final streamed chunk carries delta.content == None;
            # skip it instead of emitting a literal JSON null to the client.
            if content is None:
                continue
            yield f'data:{json.dumps(wrap_data(content), ensure_ascii=False)}\n\n'
        count_content = json.dumps(wrap_data(f"<br/>"), ensure_ascii=False)
        yield f'data:{count_content}\n\n'
        # Wrap the cluster's document ids in a clickable <sub> element.
        ids_string = ",".join(ids_array[count])
        detail_str = f'<br/><sub onclick="subclick(\'{ids_string}\')" ids="{ids_string}">详情</sub>'
        detail_str = json.dumps(wrap_data(detail_str), ensure_ascii=False)
        count += 1
        yield f'data:{detail_str}\n\n'
    yield "data:[DONE]\n\n"


@app.route('/model/multi_summary', methods=['POST'])
@cross_origin()
def post_http_topics():
    """POST endpoint: stream multi-cluster event summaries as SSE."""
    body = request.get_json()
    stream = stream_with_context(event_stream(body))
    # Server-sent events: frontend consumes incremental `data:` frames.
    return Response(stream, mimetype="text/event-stream")


if __name__ == '__main__':
    # Bind to all interfaces; `port` comes from the args configuration.
    # (0.0.0.0 means the service is reachable via the machine's own IP.)
    app.run(host='0.0.0.0', port=port)
